Posted to commits@hbase.apache.org by mi...@apache.org on 2016/03/04 18:50:45 UTC

[01/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 4ce8323fc -> 3e48e84d3


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
index 9a60dce..fcaf416 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
@@ -34,1938 +34,1994 @@
 <span class="sourceLineNo">026</span>import java.util.concurrent.locks.Lock;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.27"></a>
 <span class="sourceLineNo">028</span><a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.Path;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.Cell;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.HConstants;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.io.ByteArrayOutputStream;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.io.ByteBuffInputStream;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.io.ByteBufferSupportDataOutputStream;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.nio.MultiByteBuff;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.nio.SingleByteBuff;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.ChecksumType;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.io.IOUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>import com.google.common.annotations.VisibleForTesting;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import com.google.common.base.Preconditions;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>/**<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * Reading {@link HFile} version 1 and 2 blocks, and writing version 2 blocks.<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * &lt;ul&gt;<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * &lt;li&gt;In version 1 all blocks are always compressed or uncompressed, as<a name="line.59"></a>
-<span class="sourceLineNo">060</span> * specified by the {@link HFile}'s compression algorithm, with a type-specific<a name="line.60"></a>
-<span class="sourceLineNo">061</span> * magic record stored in the beginning of the compressed data (i.e. one needs<a name="line.61"></a>
-<span class="sourceLineNo">062</span> * to uncompress the compressed block to determine the block type). There is<a name="line.62"></a>
-<span class="sourceLineNo">063</span> * only a single compression algorithm setting for all blocks. Offset and size<a name="line.63"></a>
-<span class="sourceLineNo">064</span> * information from the block index are required to read a block.<a name="line.64"></a>
-<span class="sourceLineNo">065</span> * &lt;li&gt;In version 2 a block is structured as follows:<a name="line.65"></a>
-<span class="sourceLineNo">066</span> * &lt;ul&gt;<a name="line.66"></a>
-<span class="sourceLineNo">067</span> * &lt;li&gt;header (see Writer#finishBlock())<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * &lt;ul&gt;<a name="line.68"></a>
-<span class="sourceLineNo">069</span> * &lt;li&gt;Magic record identifying the block type (8 bytes)<a name="line.69"></a>
-<span class="sourceLineNo">070</span> * &lt;li&gt;Compressed block size, excluding header, including checksum (4 bytes)<a name="line.70"></a>
-<span class="sourceLineNo">071</span> * &lt;li&gt;Uncompressed block size, excluding header, excluding checksum (4 bytes)<a name="line.71"></a>
-<span class="sourceLineNo">072</span> * &lt;li&gt;The offset of the previous block of the same type (8 bytes). This is<a name="line.72"></a>
-<span class="sourceLineNo">073</span> * used to be able to navigate to the previous block without going to the block<a name="line.73"></a>
-<span class="sourceLineNo">074</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the ordinal describing checksum type (1 byte)<a name="line.74"></a>
-<span class="sourceLineNo">075</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the number of data bytes/checksum chunk (4 bytes)<a name="line.75"></a>
-<span class="sourceLineNo">076</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the size of data on disk, including header,<a name="line.76"></a>
-<span class="sourceLineNo">077</span> * excluding checksums (4 bytes)<a name="line.77"></a>
-<span class="sourceLineNo">078</span> * &lt;/ul&gt;<a name="line.78"></a>
-<span class="sourceLineNo">079</span> * &lt;/li&gt;<a name="line.79"></a>
-<span class="sourceLineNo">080</span> * &lt;li&gt;Raw/Compressed/Encrypted/Encoded data. The compression algorithm is the<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * same for all the blocks in the {@link HFile}, similarly to what was done in<a name="line.81"></a>
-<span class="sourceLineNo">082</span> * version 1.<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * &lt;li&gt;For minorVersions &amp;gt;=1, a series of 4 byte checksums, one each for<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * the number of bytes specified by bytesPerChecksum.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * &lt;/ul&gt;<a name="line.85"></a>
-<span class="sourceLineNo">086</span> * &lt;/ul&gt;<a name="line.86"></a>
-<span class="sourceLineNo">087</span> */<a name="line.87"></a>
-<span class="sourceLineNo">088</span>@InterfaceAudience.Private<a name="line.88"></a>
-<span class="sourceLineNo">089</span>public class HFileBlock implements Cacheable {<a name="line.89"></a>
-<span class="sourceLineNo">090</span><a name="line.90"></a>
-<span class="sourceLineNo">091</span>  /**<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * On a checksum failure on a Reader, these many suceeding read<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * requests switch back to using hdfs checksums before auto-reenabling<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   * hbase checksum verification.<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static final boolean FILL_HEADER = true;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final boolean DONT_FILL_HEADER = false;<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  /**<a name="line.101"></a>
-<span class="sourceLineNo">102</span>   * The size of block header when blockType is {@link BlockType#ENCODED_DATA}.<a name="line.102"></a>
-<span class="sourceLineNo">103</span>   * This extends normal header by adding the id of encoder.<a name="line.103"></a>
-<span class="sourceLineNo">104</span>   */<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  public static final int ENCODED_HEADER_SIZE = HConstants.HFILEBLOCK_HEADER_SIZE<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      + DataBlockEncoding.ID_SIZE;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  static final byte[] DUMMY_HEADER_NO_CHECKSUM =<a name="line.108"></a>
-<span class="sourceLineNo">109</span>     new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  // How to get the estimate correctly? if it is a singleBB?<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  public static final int MULTI_BYTE_BUFFER_HEAP_SIZE =<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      (int)ClassSize.estimateBase(MultiByteBuff.class, false);<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>  // meta.usesHBaseChecksum+offset+nextBlockOnDiskSizeWithHeader<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public static final int EXTRA_SERIALIZATION_SPACE = Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      + Bytes.SIZEOF_LONG;<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  /**<a name="line.119"></a>
-<span class="sourceLineNo">120</span>   * Each checksum value is an integer that can be stored in 4 bytes.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  static final CacheableDeserializer&lt;Cacheable&gt; blockDeserializer =<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.126"></a>
-<span class="sourceLineNo">127</span>            throws IOException {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();<a name="line.128"></a>
-<span class="sourceLineNo">129</span>          ByteBuff newByteBuffer;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>          if (reuse) {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>            newByteBuffer = buf.slice();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>          } else {<a name="line.132"></a>
-<span class="sourceLineNo">133</span>            // Used only in tests<a name="line.133"></a>
-<span class="sourceLineNo">134</span>            int len = buf.limit();<a name="line.134"></a>
-<span class="sourceLineNo">135</span>            newByteBuffer = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.135"></a>
-<span class="sourceLineNo">136</span>            newByteBuffer.put(0, buf, buf.position(), len);<a name="line.136"></a>
-<span class="sourceLineNo">137</span>          }<a name="line.137"></a>
-<span class="sourceLineNo">138</span>          buf.position(buf.limit());<a name="line.138"></a>
-<span class="sourceLineNo">139</span>          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>          boolean usesChecksum = buf.get() == (byte)1;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>          HFileBlock hFileBlock = new HFileBlock(newByteBuffer, usesChecksum, memType);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>          hFileBlock.offset = buf.getLong();<a name="line.142"></a>
-<span class="sourceLineNo">143</span>          hFileBlock.nextBlockOnDiskSizeWithHeader = buf.getInt();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>          if (hFileBlock.hasNextBlockHeader()) {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>            hFileBlock.buf.limit(hFileBlock.buf.limit() - hFileBlock.headerSize());<a name="line.145"></a>
-<span class="sourceLineNo">146</span>          }<a name="line.146"></a>
-<span class="sourceLineNo">147</span>          return hFileBlock;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>        @Override<a name="line.150"></a>
-<span class="sourceLineNo">151</span>        public int getDeserialiserIdentifier() {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>          return deserializerIdentifier;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>        @Override<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>          // Used only in tests<a name="line.157"></a>
-<span class="sourceLineNo">158</span>          return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        }<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      };<a name="line.160"></a>
-<span class="sourceLineNo">161</span>  private static final int deserializerIdentifier;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  static {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    deserializerIdentifier = CacheableDeserializerIdManager<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        .registerDeserializer(blockDeserializer);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>  }<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  /** Type of block. Header field 0. */<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private BlockType blockType;<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>  /** Size on disk excluding header, including checksum. Header field 1. */<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  private int onDiskSizeWithoutHeader;<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /** Size of pure data. Does not include header or checksums. Header field 2. */<a name="line.173"></a>
-<span class="sourceLineNo">174</span>  private final int uncompressedSizeWithoutHeader;<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  /** The offset of the previous block on disk. Header field 3. */<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private final long prevBlockOffset;<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  /**<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   * Size on disk of header + data. Excludes checksum. Header field 6,<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * OR calculated from {@link #onDiskSizeWithoutHeader} when using HDFS checksum.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   */<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final int onDiskDataSizeWithHeader;<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /** The in-memory representation of the hfile block */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private ByteBuff buf;<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span>  /** Meta data that holds meta information on the hfileblock */<a name="line.188"></a>
-<span class="sourceLineNo">189</span>  private HFileContext fileContext;<a name="line.189"></a>
+<span class="sourceLineNo">029</span>import org.apache.commons.logging.Log;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.commons.logging.LogFactory;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.fs.Path;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.Cell;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.HConstants;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.io.ByteArrayOutputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.io.ByteBuffInputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.ByteBufferSupportDataOutputStream;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.nio.MultiByteBuff;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.nio.SingleByteBuff;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.ChecksumType;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.io.IOUtils;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>import com.google.common.annotations.VisibleForTesting;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import com.google.common.base.Preconditions;<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>/**<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * Reads {@link HFile} version 1 and version 2 blocks but writes version 2 blocks only.<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * Version 2 was introduced in hbase-0.92.0. Does read and write out to the filesystem but also<a name="line.60"></a>
+<span class="sourceLineNo">061</span> * the read and write to Cache.<a name="line.61"></a>
+<span class="sourceLineNo">062</span> *<a name="line.62"></a>
+<span class="sourceLineNo">063</span> * &lt;h3&gt;HFileBlock: Version 1&lt;/h3&gt;<a name="line.63"></a>
+<span class="sourceLineNo">064</span> * As of this writing, there should be no more version 1 blocks found out in the wild. Version 2<a name="line.64"></a>
+<span class="sourceLineNo">065</span> * as introduced in hbase-0.92.0.<a name="line.65"></a>
+<span class="sourceLineNo">066</span> * In version 1 all blocks are always compressed or uncompressed, as<a name="line.66"></a>
+<span class="sourceLineNo">067</span> * specified by the {@link HFile}'s compression algorithm, with a type-specific<a name="line.67"></a>
+<span class="sourceLineNo">068</span> * magic record stored in the beginning of the compressed data (i.e. one needs<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * to uncompress the compressed block to determine the block type). There is<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * only a single compression algorithm setting for all blocks. Offset and size<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * information from the block index are required to read a block.<a name="line.71"></a>
+<span class="sourceLineNo">072</span> * &lt;h3&gt;HFileBlock: Version 2&lt;/h3&gt;<a name="line.72"></a>
+<span class="sourceLineNo">073</span> * In version 2, a block is structured as follows:<a name="line.73"></a>
+<span class="sourceLineNo">074</span> * &lt;ul&gt;<a name="line.74"></a>
+<span class="sourceLineNo">075</span> * &lt;li&gt;&lt;b&gt;Header:&lt;/b&gt; See Writer#putHeader(); header total size is HFILEBLOCK_HEADER_SIZE)<a name="line.75"></a>
+<span class="sourceLineNo">076</span> * &lt;ul&gt;<a name="line.76"></a>
+<span class="sourceLineNo">077</span> * &lt;li&gt;Magic record identifying the {@link BlockType} (8 bytes): e.g. &lt;code&gt;DATABLK*&lt;/code&gt;<a name="line.77"></a>
+<span class="sourceLineNo">078</span> * &lt;li&gt;Compressed -- a.k.a 'on disk' -- block size, excluding header, but including<a name="line.78"></a>
+<span class="sourceLineNo">079</span> *     tailing checksum bytes (4 bytes)<a name="line.79"></a>
+<span class="sourceLineNo">080</span> * &lt;li&gt;Uncompressed block size, excluding header, and excluding checksum bytes (4 bytes)<a name="line.80"></a>
+<span class="sourceLineNo">081</span> * &lt;li&gt;The offset of the previous block of the same type (8 bytes). This is<a name="line.81"></a>
+<span class="sourceLineNo">082</span> * used to navigate to the previous block without having to go to the block index<a name="line.82"></a>
+<span class="sourceLineNo">083</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the ordinal describing checksum type (1 byte)<a name="line.83"></a>
+<span class="sourceLineNo">084</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the number of data bytes/checksum chunk (4 bytes)<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the size of data 'on disk', including header,<a name="line.85"></a>
+<span class="sourceLineNo">086</span> * excluding checksums (4 bytes)<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * &lt;/ul&gt;<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * &lt;/li&gt;<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * &lt;li&gt;&lt;b&gt;Raw/Compressed/Encrypted/Encoded data:&lt;/b&gt; The compression algorithm is the<a name="line.89"></a>
+<span class="sourceLineNo">090</span> * same for all the blocks in the {@link HFile}, similarly to what was done in<a name="line.90"></a>
+<span class="sourceLineNo">091</span> * version 1. If compression is NONE, this is just raw, serialized Cells.<a name="line.91"></a>
+<span class="sourceLineNo">092</span> * &lt;li&gt;&lt;b&gt;Tail:&lt;/b&gt; For minorVersions &amp;gt;=1, a series of 4 byte checksums, one each for<a name="line.92"></a>
+<span class="sourceLineNo">093</span> * the number of bytes specified by bytesPerChecksum.<a name="line.93"></a>
+<span class="sourceLineNo">094</span> * &lt;/ul&gt;<a name="line.94"></a>
+<span class="sourceLineNo">095</span> * &lt;p&gt;Be aware that when we read from HDFS, we overread pulling in the next blocks' header too.<a name="line.95"></a>
+<span class="sourceLineNo">096</span> * We do this to save having to do two seeks to read an HFileBlock; a seek to read the header<a name="line.96"></a>
+<span class="sourceLineNo">097</span> * to figure lengths, etc., and then another seek to pull in the data.<a name="line.97"></a>
+<span class="sourceLineNo">098</span> */<a name="line.98"></a>
+<span class="sourceLineNo">099</span>@InterfaceAudience.Private<a name="line.99"></a>
+<span class="sourceLineNo">100</span>public class HFileBlock implements Cacheable {<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final Log LOG = LogFactory.getLog(HFileBlock.class);<a name="line.101"></a>
+<span class="sourceLineNo">102</span><a name="line.102"></a>
+<span class="sourceLineNo">103</span>  /**<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   * On a checksum failure, do these many succeeding read requests using hdfs checksums before<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * auto-reenabling hbase checksum verification.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   */<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;<a name="line.107"></a>
+<span class="sourceLineNo">108</span><a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static int UNSET = -1;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  public static final boolean FILL_HEADER = true;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  public static final boolean DONT_FILL_HEADER = false;<a name="line.111"></a>
+<span class="sourceLineNo">112</span><a name="line.112"></a>
+<span class="sourceLineNo">113</span>  // How to get the estimate correctly? if it is a singleBB?<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  public static final int MULTI_BYTE_BUFFER_HEAP_SIZE =<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      (int)ClassSize.estimateBase(MultiByteBuff.class, false);<a name="line.115"></a>
+<span class="sourceLineNo">116</span><a name="line.116"></a>
+<span class="sourceLineNo">117</span>  /**<a name="line.117"></a>
+<span class="sourceLineNo">118</span>   * See #blockDeserializer method for more info.<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * 13 bytes of extra stuff stuck on the end of the HFileBlock that we pull in from HDFS (note,<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * when we read from HDFS, we pull in an HFileBlock AND the header of the next block if one).<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   * The 13 bytes are: usesHBaseChecksum (1 byte) + offset of this block (long) +<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * nextBlockOnDiskSizeWithHeader (int).<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   */<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  public static final int EXTRA_SERIALIZATION_SPACE =<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG;<a name="line.125"></a>
+<span class="sourceLineNo">126</span><a name="line.126"></a>
+<span class="sourceLineNo">127</span>  /**<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   * Each checksum value is an integer that can be stored in 4 bytes.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   */<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  static final byte[] DUMMY_HEADER_NO_CHECKSUM =<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  /**<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * Used deserializing blocks from Cache.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   *<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * Serializing to cache is a little hard to follow. See Writer#finishBlock for where it is done.<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   * When we start to append to a new HFileBlock,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>   * we skip over where the header should go before we start adding Cells. When the block is<a name="line.140"></a>
+<span class="sourceLineNo">141</span>   * done, we'll then go back and fill in the header and the checksum tail. Be aware that what<a name="line.141"></a>
+<span class="sourceLineNo">142</span>   * gets serialized into the blockcache is a byte array that contains an HFileBlock followed by<a name="line.142"></a>
+<span class="sourceLineNo">143</span>   * its checksums and then the header of the next HFileBlock (needed to help navigate), followed<a name="line.143"></a>
+<span class="sourceLineNo">144</span>   * again by an extra 13 bytes of meta info needed when time to recreate the HFileBlock from cache.<a name="line.144"></a>
+<span class="sourceLineNo">145</span>   *<a name="line.145"></a>
+<span class="sourceLineNo">146</span>   * ++++++++++++++<a name="line.146"></a>
+<span class="sourceLineNo">147</span>   * + HFileBlock +<a name="line.147"></a>
+<span class="sourceLineNo">148</span>   * ++++++++++++++<a name="line.148"></a>
+<span class="sourceLineNo">149</span>   * + Checksums  +<a name="line.149"></a>
+<span class="sourceLineNo">150</span>   * ++++++++++++++<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   * + NextHeader +<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   * ++++++++++++++<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * + ExtraMeta! +<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   * ++++++++++++++<a name="line.154"></a>
+<span class="sourceLineNo">155</span>   *<a name="line.155"></a>
+<span class="sourceLineNo">156</span>   * TODO: Fix it so we do NOT put the NextHeader into blockcache. It is not necessary.<a name="line.156"></a>
+<span class="sourceLineNo">157</span>   */<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  static final CacheableDeserializer&lt;Cacheable&gt; blockDeserializer =<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.159"></a>
+<span class="sourceLineNo">160</span>        public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.160"></a>
+<span class="sourceLineNo">161</span>        throws IOException {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>          // Rewind to just before the EXTRA_SERIALIZATION_SPACE.<a name="line.162"></a>
+<span class="sourceLineNo">163</span>          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>          // Get a new buffer to pass the deserialized HFileBlock for it to 'own'.<a name="line.164"></a>
+<span class="sourceLineNo">165</span>          ByteBuff newByteBuffer;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          if (reuse) {<a name="line.166"></a>
+<span class="sourceLineNo">167</span>            newByteBuffer = buf.slice();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          } else {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>            int len = buf.limit();<a name="line.169"></a>
+<span class="sourceLineNo">170</span>            newByteBuffer = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.170"></a>
+<span class="sourceLineNo">171</span>            newByteBuffer.put(0, buf, buf.position(), len);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>          }<a name="line.172"></a>
+<span class="sourceLineNo">173</span>          // Read out the EXTRA_SERIALIZATION_SPACE content and shove into our HFileBlock.<a name="line.173"></a>
+<span class="sourceLineNo">174</span>          buf.position(buf.limit());<a name="line.174"></a>
+<span class="sourceLineNo">175</span>          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);<a name="line.175"></a>
+<span class="sourceLineNo">176</span>          boolean usesChecksum = buf.get() == (byte)1;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>          HFileBlock hFileBlock = new HFileBlock(newByteBuffer, usesChecksum, memType);<a name="line.177"></a>
+<span class="sourceLineNo">178</span>          hFileBlock.offset = buf.getLong();<a name="line.178"></a>
+<span class="sourceLineNo">179</span>          hFileBlock.nextBlockOnDiskSizeWithHeader = buf.getInt();<a name="line.179"></a>
+<span class="sourceLineNo">180</span>          if (hFileBlock.hasNextBlockHeader()) {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>            hFileBlock.buf.limit(hFileBlock.buf.limit() - hFileBlock.headerSize());<a name="line.181"></a>
+<span class="sourceLineNo">182</span>          }<a name="line.182"></a>
+<span class="sourceLineNo">183</span>          return hFileBlock;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>        @Override<a name="line.186"></a>
+<span class="sourceLineNo">187</span>        public int getDeserialiserIdentifier() {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>          return deserializerIdentifier;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>        }<a name="line.189"></a>
 <span class="sourceLineNo">190</span><a name="line.190"></a>
-<span class="sourceLineNo">191</span>  /**<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * The offset of this block in the file. Populated by the reader for<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * convenience of access. This offset is not part of the block header.<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  private long offset = -1;<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * The on-disk size of the next block, including the header, obtained by<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * peeking into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the next block's<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * header, or -1 if unknown.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  private int nextBlockOnDiskSizeWithHeader = -1;<a name="line.202"></a>
+<span class="sourceLineNo">191</span>        @Override<a name="line.191"></a>
+<span class="sourceLineNo">192</span>        public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>          // Used only in tests<a name="line.193"></a>
+<span class="sourceLineNo">194</span>          return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>        }<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      };<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>  private static final int deserializerIdentifier;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  static {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    deserializerIdentifier = CacheableDeserializerIdManager<a name="line.200"></a>
+<span class="sourceLineNo">201</span>        .registerDeserializer(blockDeserializer);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
 <span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private MemoryType memType = MemoryType.EXCLUSIVE;<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * is mostly used when the block data has already been read and uncompressed,<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   * and is sitting in a byte buffer.<a name="line.209"></a>
-<span class="sourceLineNo">210</span>   *<a name="line.210"></a>
-<span class="sourceLineNo">211</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.211"></a>
-<span class="sourceLineNo">212</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.212"></a>
-<span class="sourceLineNo">213</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.213"></a>
-<span class="sourceLineNo">214</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.214"></a>
-<span class="sourceLineNo">215</span>   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes) followed by<a name="line.215"></a>
-<span class="sourceLineNo">216</span>   *          uncompressed data. This<a name="line.216"></a>
-<span class="sourceLineNo">217</span>   * @param fillHeader when true, parse {@code buf} and override the first 4 header fields.<a name="line.217"></a>
-<span class="sourceLineNo">218</span>   * @param offset the file offset the block was read from<a name="line.218"></a>
-<span class="sourceLineNo">219</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.219"></a>
-<span class="sourceLineNo">220</span>   * @param fileContext HFile meta data<a name="line.220"></a>
-<span class="sourceLineNo">221</span>   */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      long prevBlockOffset, ByteBuff buf, boolean fillHeader, long offset,<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    this.blockType = blockType;<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    this.buf = buf;<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    this.offset = offset;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    this.fileContext = fileContext;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    if (fillHeader)<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      overwriteHeader();<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    this.buf.rewind();<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  }<a name="line.236"></a>
+<span class="sourceLineNo">204</span>  /** Type of block. Header field 0. */<a name="line.204"></a>
+<span class="sourceLineNo">205</span>  private BlockType blockType;<a name="line.205"></a>
+<span class="sourceLineNo">206</span><a name="line.206"></a>
+<span class="sourceLineNo">207</span>  /**<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   * Size on disk excluding header, including checksum. Header field 1.<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   */<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  private int onDiskSizeWithoutHeader;<a name="line.211"></a>
+<span class="sourceLineNo">212</span><a name="line.212"></a>
+<span class="sourceLineNo">213</span>  /**<a name="line.213"></a>
+<span class="sourceLineNo">214</span>   * Size of pure data. Does not include header or checksums. Header field 2.<a name="line.214"></a>
+<span class="sourceLineNo">215</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.215"></a>
+<span class="sourceLineNo">216</span>   */<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  private final int uncompressedSizeWithoutHeader;<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>  /**<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   * The offset of the previous block on disk. Header field 3.<a name="line.220"></a>
+<span class="sourceLineNo">221</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   */<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  private final long prevBlockOffset;<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>  /**<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   * Size on disk of header + data. Excludes checksum. Header field 6,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>   * OR calculated from {@link #onDiskSizeWithoutHeader} when using HDFS checksum.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private final int onDiskDataSizeWithHeader;<a name="line.230"></a>
+<span class="sourceLineNo">231</span><a name="line.231"></a>
+<span class="sourceLineNo">232</span>  /** The in-memory representation of the hfile block */<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  private ByteBuff buf;<a name="line.233"></a>
+<span class="sourceLineNo">234</span><a name="line.234"></a>
+<span class="sourceLineNo">235</span>  /** Meta data that holds meta information on the hfileblock */<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private HFileContext fileContext;<a name="line.236"></a>
 <span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    this(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, prevBlockOffset,<a name="line.241"></a>
-<span class="sourceLineNo">242</span>        new SingleByteBuff(buf), fillHeader, offset, onDiskDataSizeWithHeader, fileContext);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  }<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /**<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.246"></a>
-<span class="sourceLineNo">247</span>   */<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  HFileBlock(HFileBlock that) {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    this.blockType = that.blockType;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    this.prevBlockOffset = that.prevBlockOffset;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    this.buf = that.buf.duplicate();<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this.offset = that.offset;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    this.fileContext = that.fileContext;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
-<span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    this(new SingleByteBuff(b), usesHBaseChecksum);<a name="line.261"></a>
-<span class="sourceLineNo">262</span>  }<a name="line.262"></a>
-<span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>  /**<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * to that point.<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   */<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum) throws IOException {<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this(b, usesHBaseChecksum, MemoryType.EXCLUSIVE);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  }<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>  /**<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * to that point.<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum, MemoryType memType) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    b.rewind();<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    blockType = BlockType.read(b);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    onDiskSizeWithoutHeader = b.getInt();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    uncompressedSizeWithoutHeader = b.getInt();<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    prevBlockOffset = b.getLong();<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    HFileContextBuilder contextBuilder = new HFileContextBuilder();<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    if (usesHBaseChecksum) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      contextBuilder.withBytesPerCheckSum(b.getInt());<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      this.onDiskDataSizeWithHeader = b.getInt();<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    } else {<a name="line.292"></a>
-<span class="sourceLineNo">293</span>      contextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      contextBuilder.withBytesPerCheckSum(0);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      this.onDiskDataSizeWithHeader = onDiskSizeWithoutHeader +<a name="line.295"></a>
-<span class="sourceLineNo">296</span>                                       HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    this.fileContext = contextBuilder.build();<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    this.memType = memType;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    buf = b;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    buf.rewind();<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
-<span class="sourceLineNo">303</span><a name="line.303"></a>
-<span class="sourceLineNo">304</span>  public BlockType getBlockType() {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    return blockType;<a name="line.305"></a>
+<span class="sourceLineNo">238</span>  /**<a name="line.238"></a>
+<span class="sourceLineNo">239</span>   * The offset of this block in the file. Populated by the reader for<a name="line.239"></a>
+<span class="sourceLineNo">240</span>   * convenience of access. This offset is not part of the block header.<a name="line.240"></a>
+<span class="sourceLineNo">241</span>   */<a name="line.241"></a>
+<span class="sourceLineNo">242</span>  private long offset = UNSET;<a name="line.242"></a>
+<span class="sourceLineNo">243</span><a name="line.243"></a>
+<span class="sourceLineNo">244</span>  /**<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * The on-disk size of the next block, including the header, obtained by<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   * peeking into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the next block's<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   * header, or -1 if unknown.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>   */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  private int nextBlockOnDiskSizeWithHeader = UNSET;<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private MemoryType memType = MemoryType.EXCLUSIVE;<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>  /**<a name="line.253"></a>
+<span class="sourceLineNo">254</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.254"></a>
+<span class="sourceLineNo">255</span>   * is used when the block data has already been read and uncompressed,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>   * and is sitting in a byte buffer.<a name="line.256"></a>
+<span class="sourceLineNo">257</span>   *<a name="line.257"></a>
+<span class="sourceLineNo">258</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.258"></a>
+<span class="sourceLineNo">259</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.259"></a>
+<span class="sourceLineNo">260</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.260"></a>
+<span class="sourceLineNo">261</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.261"></a>
+<span class="sourceLineNo">262</span>   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes) followed by<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   *          uncompressed data.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * @param offset the file offset the block was read from<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.266"></a>
+<span class="sourceLineNo">267</span>   * @param fileContext HFile meta data<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   */<a name="line.268"></a>
+<span class="sourceLineNo">269</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      long prevBlockOffset, ByteBuff buf, boolean fillHeader, long offset,<a name="line.270"></a>
+<span class="sourceLineNo">271</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.blockType = blockType;<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    this.buf = buf;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    this.offset = offset;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    this.fileContext = fileContext;<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    if (fillHeader) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      overwriteHeader();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.buf.rewind();<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    this(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, prevBlockOffset,<a name="line.289"></a>
+<span class="sourceLineNo">290</span>        new SingleByteBuff(buf), fillHeader, offset, onDiskDataSizeWithHeader, fileContext);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>  /**<a name="line.293"></a>
+<span class="sourceLineNo">294</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.294"></a>
+<span class="sourceLineNo">295</span>   */<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  HFileBlock(HFileBlock that) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    this.blockType = that.blockType;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;<a name="line.298"></a>
+<span class="sourceLineNo">299</span>    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    this.prevBlockOffset = that.prevBlockOffset;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    this.buf = that.buf.duplicate();<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    this.offset = that.offset;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.fileContext = that.fileContext;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;<a name="line.305"></a>
 <span class="sourceLineNo">306</span>  }<a name="line.306"></a>
 <span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  public short getDataBlockEncodingId() {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.311"></a>
-<span class="sourceLineNo">312</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    return buf.getShort(headerSize());<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  }<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>  /**<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @return the on-disk size of header + data part + checksum.<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   */<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  public int getOnDiskSizeWithHeader() {<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   */<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  public int getOnDiskSizeWithoutHeader() {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    return onDiskSizeWithoutHeader;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  }<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  /**<a name="line.331"></a>
-<span class="sourceLineNo">332</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   */<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   public int getUncompressedSizeWithoutHeader() {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    return uncompressedSizeWithoutHeader;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>  }<a name="line.336"></a>
-<span class="sourceLineNo">337</span><a name="line.337"></a>
-<span class="sourceLineNo">338</span>  /**<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   *         -1 if unknown<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public long getPrevBlockOffset() {<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    return prevBlockOffset;<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   * is modified as side-effect.<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   */<a name="line.349"></a>
-<span class="sourceLineNo">350</span>  private void overwriteHeader() {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    buf.rewind();<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    blockType.write(buf);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    buf.putLong(prevBlockOffset);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span>  }<a name="line.361"></a>
-<span class="sourceLineNo">362</span><a name="line.362"></a>
-<span class="sourceLineNo">363</span>  /**<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * Returns a buffer that does not include the header or checksum.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   *<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.366"></a>
+<span class="sourceLineNo">308</span>  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    this(new SingleByteBuff(b), usesHBaseChecksum);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   * to that point.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum) throws IOException {<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    this(b, usesHBaseChecksum, MemoryType.EXCLUSIVE);<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  }<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * to that point.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   */<a name="line.327"></a>
+<span class="sourceLineNo">328</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum, MemoryType memType) throws IOException {<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    b.rewind();<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    blockType = BlockType.read(b);<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    onDiskSizeWithoutHeader = b.getInt();<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    uncompressedSizeWithoutHeader = b.getInt();<a name="line.332"></a>
+<span class="sourceLineNo">333</span>    prevBlockOffset = b.getLong();<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    HFileContextBuilder contextBuilder = new HFileContextBuilder();<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    if (usesHBaseChecksum) {<a name="line.336"></a>
+<span class="sourceLineNo">337</span>      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));<a name="line.337"></a>
+<span class="sourceLineNo">338</span>      contextBuilder.withBytesPerCheckSum(b.getInt());<a name="line.338"></a>
+<span class="sourceLineNo">339</span>      this.onDiskDataSizeWithHeader = b.getInt();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    } else {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      contextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.341"></a>
+<span class="sourceLineNo">342</span>      contextBuilder.withBytesPerCheckSum(0);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      this.onDiskDataSizeWithHeader =<a name="line.343"></a>
+<span class="sourceLineNo">344</span>          onDiskSizeWithoutHeader + HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    this.fileContext = contextBuilder.build();<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    this.memType = memType;<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    buf = b;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    buf.rewind();<a name="line.349"></a>
+<span class="sourceLineNo">350</span>  }<a name="line.350"></a>
+<span class="sourceLineNo">351</span><a name="line.351"></a>
+<span class="sourceLineNo">352</span>  public BlockType getBlockType() {<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    return blockType;<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  }<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  public short getDataBlockEncodingId() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.358"></a>
+<span class="sourceLineNo">359</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.359"></a>
+<span class="sourceLineNo">360</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    }<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return buf.getShort(headerSize());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the on-disk size of header + data part + checksum.<a name="line.366"></a>
 <span class="sourceLineNo">367</span>   */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ByteBuff dup = this.buf.duplicate();<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    dup.position(headerSize());<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    dup.limit(buf.limit() - totalChecksumBytes());<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    return dup.slice();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>  }<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>  /**<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   * Returns the buffer this block stores internally. The clients must not<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * modify the buffer object. This method has to be public because it is used<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   * filter lookup, but has to be used with caution. Checksum data is not<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * included in the returned buffer but header data is.<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   *<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * @return the buffer of this block for read-only operations<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   */<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  public ByteBuff getBufferReadOnly() {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    ByteBuff dup = this.buf.duplicate();<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    dup.limit(buf.limit() - totalChecksumBytes());<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    return dup.slice();<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>  /**<a name="line.390"></a>
-<span class="sourceLineNo">391</span>   * Returns the buffer of this block, including header data. The clients must<a name="line.391"></a>
-<span class="sourceLineNo">392</span>   * not modify the buffer object. This method has to be public because it is<a name="line.392"></a>
-<span class="sourceLineNo">393</span>   * used in {@link org.apache.hadoop.hbase.io.hfile.bucket.BucketCache} to avoid buffer copy.<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   *<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @return the buffer with header and checksum included for read-only operations<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   */<a name="line.396"></a>
-<span class="sourceLineNo">397</span>  public ByteBuff getBufferReadOnlyWithHeader() {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    ByteBuff dup = this.buf.duplicate();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    return dup.slice();<a name="line.399"></a>
-<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>  /**<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   * Returns a byte buffer of this block, including header data and checksum, positioned at<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * the beginning of header. The underlying data array is not copied.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   *<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @return the byte buffer with header and checksum included<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  ByteBuff getBufferWithHeader() {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    ByteBuff dupBuf = buf.duplicate();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    dupBuf.rewind();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    return dupBuf;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      String fieldName) throws IOException {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    if (valueFromBuf != valueFromField) {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.417"></a>
-<span class="sourceLineNo">418</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    }<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.422"></a>
-<span class="sourceLineNo">423</span>      throws IOException {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (valueFromBuf != valueFromField) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   * This function is primary for testing and debugging, and is not<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   */<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  void sanityCheck() throws IOException {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    buf.rewind();<a name="line.438"></a>
-<span class="sourceLineNo">439</span><a name="line.439"></a>
-<span class="sourceLineNo">440</span>    sanityCheckAssertion(BlockType.read(buf), blockType);<a name="line.440"></a>
-<span class="sourceLineNo">441</span><a name="line.441"></a>
-<span class="sourceLineNo">442</span>    sanityCheckAssertion(buf.getInt(), onDiskSizeWithoutHeader,<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        "onDiskSizeWithoutHeader");<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>    sanityCheckAssertion(buf.getInt(), uncompressedSizeWithoutHeader,<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        "uncompressedSizeWithoutHeader");<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>    sanityCheckAssertion(buf.getLong(), prevBlockOffset, "prevBlocKOffset");<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      sanityCheckAssertion(buf.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      sanityCheckAssertion(buf.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.451"></a>
-<span class="sourceLineNo">452</span>          "bytesPerChecksum");<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      sanityCheckAssertion(buf.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    }<a name="line.454"></a>
-<span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>    int cksumBytes = totalChecksumBytes();<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    if (buf.limit() != expectedBufLimit) {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      throw new AssertionError("Expected buffer limit " + expectedBufLimit<a name="line.459"></a>
-<span class="sourceLineNo">460</span>          + ", got " + buf.limit());<a name="line.460"></a>
-<span class="sourceLineNo">461</span>    }<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    int hdrSize = headerSize();<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    if (buf.capacity() != expectedBufLimit &amp;&amp;<a name="line.466"></a>
-<span class="sourceLineNo">467</span>        buf.capacity() != expectedBufLimit + hdrSize) {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>      throw new AssertionError("Invalid buffer capacity: " + buf.capacity() +<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    }<a name="line.470"></a>
-<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
-<span class="sourceLineNo">472</span><a name="line.472"></a>
-<span class="sourceLineNo">473</span>  @Override<a name="line.473"></a>
-<span class="sourceLineNo">474</span>  public String toString() {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    StringBuilder sb = new StringBuilder()<a name="line.475"></a>
-<span class="sourceLineNo">476</span>      .append("HFileBlock [")<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      .append(" fileOffset=").append(offset)<a name="line.477"></a>
-<span class="sourceLineNo">478</span>      .append(" headerSize()=").append(headerSize())<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      .append(" blockType=").append(blockType)<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      .append(" onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.480"></a>
-<span class="sourceLineNo">481</span>      .append(" uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      .append(" prevBlockOffset=").append(prevBlockOffset)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      .append(" isUseHBaseChecksum()=").append(fileContext.isUseHBaseChecksum());<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      sb.append(" checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        .append(" bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        .append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    } else {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      sb.append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.490"></a>
-<span class="sourceLineNo">491</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    }<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    String dataBegin = null;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    if (buf.hasArray()) {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    } else {<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    }<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    sb.append(" getOnDiskSizeWithHeader()=").append(getOnDiskSizeWithHeader())<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      .append(" totalChecksumBytes()=").append(totalChecksumBytes())<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      .append(" isUnpacked()=").append(isUnpacked())<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      .append(" buf=[ ").append(buf).append(" ]")<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      .append(" dataBeginsWith=").append(dataBegin)<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      .append(" fileContext=").append(fileContext)<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      .append(" ]");<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    return sb.toString();<a name="line.511"></a>
-<span class="sourceLineNo">512</span>  }<a name="line.512"></a>
-<span class="sourceLineNo">513</span><a name="line.513"></a>
-<span class="sourceLineNo">514</span>  /**<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * Called after reading a block with provided onDiskSizeWithHeader.<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   */<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  private void validateOnDiskSizeWithoutHeader(int expectedOnDiskSizeWithoutHeader)<a name="line.517"></a>
-<span class="sourceLineNo">518</span>  throws IOException {<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    if (onDiskSizeWithoutHeader != expectedOnDiskSizeWithoutHeader) {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      String dataBegin = null;<a name="line.520"></a>
-<span class="sourceLineNo">521</span>      if (buf.hasArray()) {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>        dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset(), Math.min(32, buf.limit()));<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      } else {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        ByteBuff bufDup = getBufferReadOnly();<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        byte[] dataBeginBytes = new byte[Math.min(32, bufDup.limit() - bufDup.position())];<a name="line.525"></a>
-<span class="sourceLineNo">526</span>        bufDup.get(dataBeginBytes);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String blockInfoMsg =<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        "Block offset: " + offset + ", data starts with: " + dataBegin;<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new IOException("On-disk size without header provided is "<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + expectedOnDiskSizeWithoutHeader + ", but block "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>          + "header contains " + onDiskSizeWithoutHeader + ". " +<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          blockInfoMsg);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  }<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>  /**<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      // encryption details.<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      return this;<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span><a name="line.549"></a>
-<span class="sourceLineNo">550</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.551"></a>
-<span class="sourceLineNo">552</span><a name="line.552"></a>
-<span class="sourceLineNo">553</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.553"></a>
-<span class="so

<TRUNCATED>

[10/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
index 8fd15a0..da22771 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
@@ -186,741 +186,742 @@
 <span class="sourceLineNo">178</span>   * The number of bytes per checksum.<a name="line.178"></a>
 <span class="sourceLineNo">179</span>   */<a name="line.179"></a>
 <span class="sourceLineNo">180</span>  public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  // For measuring number of checksum failures<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  static final Counter checksumFailures = new Counter();<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  // for test purpose<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  public static final Counter dataBlockReadCnt = new Counter();<a name="line.185"></a>
-<span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /**<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * Number of checksum verification failures. It also<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * clears the counter.<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  public static final long getChecksumFailuresCount() {<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    long count = checksumFailures.get();<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    checksumFailures.set(0);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    return count;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /** API required to write an {@link HFile} */<a name="line.197"></a>
-<span class="sourceLineNo">198</span>  public interface Writer extends Closeable {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>    /** Add an element to the file info map. */<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>    void append(Cell cell) throws IOException;<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>    /** @return the path to this {@link HFile} */<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    Path getPath();<a name="line.208"></a>
-<span class="sourceLineNo">209</span><a name="line.209"></a>
-<span class="sourceLineNo">210</span>    /**<a name="line.210"></a>
-<span class="sourceLineNo">211</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.211"></a>
-<span class="sourceLineNo">212</span>     * a compound Bloom filter writer.<a name="line.212"></a>
-<span class="sourceLineNo">213</span>     */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    /**<a name="line.223"></a>
-<span class="sourceLineNo">224</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.224"></a>
-<span class="sourceLineNo">225</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.225"></a>
-<span class="sourceLineNo">226</span>     * in HFile version 1 and version 2.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>     */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>    /**<a name="line.230"></a>
-<span class="sourceLineNo">231</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.231"></a>
-<span class="sourceLineNo">232</span>     * HFile V2.<a name="line.232"></a>
-<span class="sourceLineNo">233</span>     */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>    /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>     * Return the file context for the HFile this writer belongs to<a name="line.237"></a>
-<span class="sourceLineNo">238</span>     */<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    HFileContext getFileContext();<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * we want to be able to swap writer implementations.<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  public static class WriterFactory {<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    protected final Configuration conf;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    protected final CacheConfig cacheConf;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    protected FileSystem fs;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    protected Path path;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    protected FSDataOutputStream ostream;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    protected CellComparator comparator = <a name="line.252"></a>
-<span class="sourceLineNo">253</span>        CellComparator.COMPARATOR;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    protected InetSocketAddress[] favoredNodes;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    private HFileContext fileContext;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    protected boolean shouldDropBehind = false;<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      this.conf = conf;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      this.cacheConf = cacheConf;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span><a name="line.262"></a>
-<span class="sourceLineNo">263</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      Preconditions.checkNotNull(fs);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(path);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      this.fs = fs;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      this.path = path;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return this;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      Preconditions.checkNotNull(ostream);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      this.ostream = ostream;<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return this;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      Preconditions.checkNotNull(comparator);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      this.comparator = comparator;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      return this;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    }<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      // Deliberately not checking for null here.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      this.favoredNodes = favoredNodes;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return this;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      this.fileContext = fileContext;<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      return this;<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>      return this;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>  // For measuring number of checksum failures<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  static final Counter CHECKSUM_FAILURES = new Counter();<a name="line.183"></a>
+<span class="sourceLineNo">184</span><a name="line.184"></a>
+<span class="sourceLineNo">185</span>  // For tests. Gets incremented when we read a block whether from HDFS or from Cache.<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  public static final Counter DATABLOCK_READ_COUNT = new Counter();<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /**<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * Number of checksum verification failures. It also<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   * clears the counter.<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   */<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public static final long getChecksumFailuresCount() {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    long count = CHECKSUM_FAILURES.get();<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    CHECKSUM_FAILURES.set(0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    return count;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>  /** API required to write an {@link HFile} */<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  public interface Writer extends Closeable {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>    /** Add an element to the file info map. */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>    void append(Cell cell) throws IOException;<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>    /** @return the path to this {@link HFile} */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    Path getPath();<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>    /**<a name="line.211"></a>
+<span class="sourceLineNo">212</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.212"></a>
+<span class="sourceLineNo">213</span>     * a compound Bloom filter writer.<a name="line.213"></a>
+<span class="sourceLineNo">214</span>     */<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.215"></a>
+<span class="sourceLineNo">216</span><a name="line.216"></a>
+<span class="sourceLineNo">217</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>    /**<a name="line.224"></a>
+<span class="sourceLineNo">225</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.225"></a>
+<span class="sourceLineNo">226</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.226"></a>
+<span class="sourceLineNo">227</span>     * in HFile version 1 and version 2.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>     */<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    /**<a name="line.231"></a>
+<span class="sourceLineNo">232</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.232"></a>
+<span class="sourceLineNo">233</span>     * HFile V2.<a name="line.233"></a>
+<span class="sourceLineNo">234</span>     */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    /**<a name="line.237"></a>
+<span class="sourceLineNo">238</span>     * Return the file context for the HFile this writer belongs to<a name="line.238"></a>
+<span class="sourceLineNo">239</span>     */<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    HFileContext getFileContext();<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /**<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * we want to be able to swap writer implementations.<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  public static class WriterFactory {<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    protected final Configuration conf;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    protected final CacheConfig cacheConf;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    protected FileSystem fs;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    protected Path path;<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    protected FSDataOutputStream ostream;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    protected CellComparator comparator = <a name="line.253"></a>
+<span class="sourceLineNo">254</span>        CellComparator.COMPARATOR;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    protected InetSocketAddress[] favoredNodes;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    private HFileContext fileContext;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    protected boolean shouldDropBehind = false;<a name="line.257"></a>
+<span class="sourceLineNo">258</span><a name="line.258"></a>
+<span class="sourceLineNo">259</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      this.conf = conf;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      this.cacheConf = cacheConf;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(fs);<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      Preconditions.checkNotNull(path);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      this.fs = fs;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      this.path = path;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      return this;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    }<a name="line.270"></a>
+<span class="sourceLineNo">271</span><a name="line.271"></a>
+<span class="sourceLineNo">272</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      Preconditions.checkNotNull(ostream);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>      this.ostream = ostream;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      return this;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    }<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      Preconditions.checkNotNull(comparator);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      this.comparator = comparator;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      return this;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      // Deliberately not checking for null here.<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      this.favoredNodes = favoredNodes;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      return this;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      this.fileContext = fileContext;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      return this;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      return this;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    }<a name="line.298"></a>
 <span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    public Writer create() throws IOException {<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.302"></a>
-<span class="sourceLineNo">303</span>            "filesystem/path or path");<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      if (path != null) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        try {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        } catch (UnsupportedOperationException uoe) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  }<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  /** The configuration key for HFile version to use for new files */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static int getFormatVersion(Configuration conf) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    checkFormatVersion(version);<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    return version;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  }<a name="line.325"></a>
-<span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  /**<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * Disables block cache access for all writers created through the<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * returned factory.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.332"></a>
-<span class="sourceLineNo">333</span>       conf) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Configuration tempConf = new Configuration(conf);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  }<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  /**<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      CacheConfig cacheConf) {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    int version = getFormatVersion(conf);<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    switch (version) {<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    case 2:<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.347"></a>
-<span class="sourceLineNo">348</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        "in hbase-site.xml)");<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    case 3:<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    default:<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.354"></a>
-<span class="sourceLineNo">355</span>          "format version " + version);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    }<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  /**<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * An abstraction used by the block index.<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  public interface CachingBlockReader {<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    /**<a name="line.365"></a>
-<span class="sourceLineNo">366</span>     * Read in a file block.<a name="line.366"></a>
-<span class="sourceLineNo">367</span>     * @param offset offset to read.<a name="line.367"></a>
-<span class="sourceLineNo">368</span>     * @param onDiskBlockSize size of the block<a name="line.368"></a>
-<span class="sourceLineNo">369</span>     * @param cacheBlock<a name="line.369"></a>
-<span class="sourceLineNo">370</span>     * @param pread<a name="line.370"></a>
-<span class="sourceLineNo">371</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.371"></a>
-<span class="sourceLineNo">372</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.372"></a>
-<span class="sourceLineNo">373</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.373"></a>
-<span class="sourceLineNo">374</span>     *  caching efficiency of encoded data blocks)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.375"></a>
-<span class="sourceLineNo">376</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.376"></a>
-<span class="sourceLineNo">377</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.377"></a>
-<span class="sourceLineNo">378</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>     * @return Block wrapped in a ByteBuffer.<a name="line.379"></a>
-<span class="sourceLineNo">380</span>     * @throws IOException<a name="line.380"></a>
-<span class="sourceLineNo">381</span>     */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.382"></a>
-<span class="sourceLineNo">383</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.385"></a>
-<span class="sourceLineNo">386</span>        throws IOException;<a name="line.386"></a>
-<span class="sourceLineNo">387</span><a name="line.387"></a>
-<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
-<span class="sourceLineNo">389</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.389"></a>
-<span class="sourceLineNo">390</span>     * @param block Block to be returned.<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     */<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    void returnBlock(HFileBlock block);<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  }<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    /**<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * write.<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     */<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    String getName();<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>    CellComparator getComparator();<a name="line.404"></a>
-<span class="sourceLineNo">405</span><a name="line.405"></a>
-<span class="sourceLineNo">406</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    Cell getLastKey();<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>    Cell midkey() throws IOException;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>    long length();<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>    long getEntries();<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>    Cell getFirstKey();<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    long indexSize();<a name="line.422"></a>
-<span class="sourceLineNo">423</span><a name="line.423"></a>
-<span class="sourceLineNo">424</span>    byte[] getFirstRowKey();<a name="line.424"></a>
-<span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>    byte[] getLastRowKey();<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    FixedFileTrailer getTrailer();<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>    /**<a name="line.436"></a>
-<span class="sourceLineNo">437</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.437"></a>
-<span class="sourceLineNo">438</span>     * {@link HFile} version.<a name="line.438"></a>
-<span class="sourceLineNo">439</span>     * Knows nothing about how that metadata is structured.<a name="line.439"></a>
-<span class="sourceLineNo">440</span>     */<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * {@link HFile}  version.<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * Knows nothing about how that metadata is structured.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     */<a name="line.447"></a>
-<span class="sourceLineNo">448</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.448"></a>
-<span class="sourceLineNo">449</span><a name="line.449"></a>
-<span class="sourceLineNo">450</span>    Path getPath();<a name="line.450"></a>
-<span class="sourceLineNo">451</span><a name="line.451"></a>
-<span class="sourceLineNo">452</span>    /** Close method with optional evictOnClose */<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    void close(boolean evictOnClose) throws IOException;<a name="line.453"></a>
-<span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.455"></a>
-<span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    boolean hasMVCCInfo();<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>     * Return the file context of the HFile this reader belongs to<a name="line.460"></a>
-<span class="sourceLineNo">461</span>     */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    HFileContext getFileContext();<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    <a name="line.463"></a>
-<span class="sourceLineNo">464</span>    boolean isPrimaryReplicaReader();<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    <a name="line.465"></a>
-<span class="sourceLineNo">466</span>    void setPrimaryReplicaReader(boolean isPrimaryReplicaReader);<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    boolean shouldIncludeMemstoreTS();<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>    boolean isDecodeMemstoreTS();<a name="line.470"></a>
-<span class="sourceLineNo">471</span><a name="line.471"></a>
-<span class="sourceLineNo">472</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>    @VisibleForTesting<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    @VisibleForTesting<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    boolean prefetchComplete();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>  }<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  /**<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * Method returns the reader given the specified arguments.<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   *<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * @param path hfile's path<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * @param fsdis stream of path's file<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * @param size max size of the trailer.<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * @param hfs<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * @return an appropriate instance of HFileReader<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   */<a name="line.492"></a>
-<span class="sourceLineNo">493</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      justification="Intentional")<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  private static Reader pickReaderVersion(Path path, FSDataInputStreamWrapper fsdis,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      long size, CacheConfig cacheConf, HFileSystem hfs, Configuration conf) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FixedFileTrailer trailer = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    try {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      switch (trailer.getMajorVersion()) {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      case 2:<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        LOG.debug("Opening HFile v2 with v3 reader");<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      case 3 :<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs, conf);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      default:<a name="line.508"></a>
-<span class="sourceLineNo">509</span>        throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (Throwable t) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      try {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        fsdis.close();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      } catch (Throwable t2) {<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper", t2);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    }<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  /**<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   * @param fs A file system<a name="line.522"></a>
-<span class="sourceLineNo">523</span>   * @param path Path to HFile<a name="line.523"></a>
-<span class="sourceLineNo">524</span>   * @param fsdis a stream of path's file<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * @param size max size of the trailer.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.526"></a>
-<span class="sourceLineNo">527</span>   * @param conf Configuration<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * @return A version specific Hfile Reader<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  @SuppressWarnings("resource")<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  public static Reader createReader(FileSystem fs, Path path,<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      FSDataInputStreamWrapper fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      throws IOException {<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    HFileSystem hfs = null;<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    // the filesystem.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    if (!(fs instanceof HFileSystem)) {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      hfs = new HFileSystem(fs);<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } else {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      hfs = (HFileSystem)fs;<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    return pickReaderVersion(path, fsdis, size, cacheConf, hfs, conf);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   *<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * @param fs filesystem<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * @param path Path to file to read<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * @param cacheConf This must not be null.  @see {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * @return an active Reader instance<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   */<a name="line.556"></a>
-<span class="sourceLineNo">557</span>  public static Reader createReader(<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      FileSystem fs, Path path, CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    return pickReaderVersion(path, stream, fs.getFileStatus(path).getLen(),<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      cacheConf, stream.getHfs(), conf);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>  }<a name="line.563"></a>
-<span class="sourceLineNo">564</span><a name="line.564"></a>
-<span class="sourceLineNo">565</span>  /**<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * This factory method is used only by unit tests<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  static Reader createReaderFromStream(Path path,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      FSDataInputStream fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    return pickReaderVersion(path, wrapper, size, cacheConf, null, conf);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  /**<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   * @param fs filesystem<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   * @param path Path to file to verify<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.579"></a>
-<span class="sourceLineNo">580</span>   * @throws IOException if failed to read from the underlying stream<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   */<a name="line.581"></a>
-<span class="sourceLineNo">582</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>  /**<a name="line.586"></a>
-<span class="sourceLineNo">587</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.587"></a>
-<span class="sourceLineNo">588</span>   * @param fs filesystem<a name="line.588"></a>
-<span class="sourceLineNo">589</span>   * @param fileStatus the file to verify<a name="line.589"></a>
-<span class="sourceLineNo">590</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * @throws IOException if failed to read from the underlying stream<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   */<a name="line.592"></a>
-<span class="sourceLineNo">593</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      throws IOException {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    final Path path = fileStatus.getPath();<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    final long size = fileStatus.getLen();<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    try {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.599"></a>
-<span class="sourceLineNo">600</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      return true;<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    } catch (IllegalArgumentException e) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      return false;<a name="line.604"></a>
-<span class="sourceLineNo">605</span>    } catch (IOException e) {<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      throw e;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } finally {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      try {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        fsdis.close();<a name="line.609"></a>
-<span class="sourceLineNo">610</span>      } catch (Throwable t) {<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper: " + path, t);<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      }<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>  }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>  /**<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;byte [], byte []&gt;(Bytes.BYTES_COMPARATOR);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    public FileInfo() {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>      super();<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    }<a name="line.633"></a>
-<span class="sourceLineNo">634</span><a name="line.634"></a>
-<span class="sourceLineNo">635</span>    /**<a name="line.635"></a>
-<span class="sourceLineNo">636</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.636"></a>
-<span class="sourceLineNo">637</span>     * key prefix.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>     *<a name="line.638"></a>
-<span class="sourceLineNo">639</span>     * @param k key to add<a name="line.639"></a>
-<span class="sourceLineNo">640</span>     * @param v value to add<a name="line.640"></a>
-<span class="sourceLineNo">641</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.641"></a>
-<span class="sourceLineNo">642</span>     *          with the reserved prefix<a name="line.642"></a>
-<span class="sourceLineNo">643</span>     * @return this file info object<a name="line.643"></a>
-<span class="sourceLineNo">644</span>     * @throws IOException if the key or value is invalid<a name="line.644"></a>
-<span class="sourceLineNo">645</span>     */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        final boolean checkPrefix) throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      if (k == null || v == null) {<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        throw new NullPointerException("Key nor value may be null");<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      }<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.651"></a>
-<span class="sourceLineNo">652</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.652"></a>
-<span class="sourceLineNo">653</span>            + " are reserved");<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      put(k, v);<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return this;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span><a name="line.658"></a>
-<span class="sourceLineNo">659</span>    public void clear() {<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      this.map.clear();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span><a name="line.662"></a>
-<span class="sourceLineNo">663</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>      return map.comparator();<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    }<a name="line.665"></a>
-<span class="sourceLineNo">666</span><a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public boolean containsKey(Object key) {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      return map.containsKey(key);<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    }<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    public boolean containsValue(Object value) {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      return map.containsValue(value);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    }<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      return map.entrySet();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    public boolean equals(Object o) {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      return map.equals(o);<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span><a name="line.682"></a>
-<span class="sourceLineNo">683</span>    public byte[] firstKey() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      return map.firstKey();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    }<a name="line.685"></a>
-<span class="sourceLineNo">686</span><a name="line.686"></a>
-<span class="sourceLineNo">687</span>    public byte[] get(Object key) {<a name="line.687"></a>
-<span class="sourceLineNo">688</span>      return map.get(key);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>    }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>    public int hashCode() {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      return map.hashCode();<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span><a name="line.694"></a>
-<span class="sourceLineNo">695</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>      return this.map.headMap(toKey);<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    }<a name="line.697"></a>
-<span class="sourceLineNo">698</span><a name="line.698"></a>
-<span class="sourceLineNo">699</span>    public boolean isEmpty() {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return map.isEmpty();<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      return map.keySet();<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>    public byte[] lastKey() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>      return map.lastKey();<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    }<a name="line.709"></a>
-<span class="sourceLineNo">710</span><a name="line.710"></a>
-<span class="sourceLineNo">711</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      return this.map.put(key, value);<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    }<a name="line.713"></a>
-<span class="sourceLineNo">714</span><a name="line.714"></a>
-<span class="sourceLineNo">715</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.map.putAll(m);<a name="line.716"></a>
-<span class="sourceLineNo">717</span>    }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public byte[] remove(Object key) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return this.map.remove(key);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    public int size() {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>      return map.size();<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    }<a name="line.725"></a>
-<span class="sourceLineNo">726</span><a name="line.726"></a>
-<span class="sourceLineNo">727</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return this.map.subMap(fromKey, toKey);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.731"></a>
-<span class="sourceLineNo">732</span>      return this.map.tailMap(fromKey);<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    }<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>    public Collection&lt;byte[]&gt; values() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      return map.values();<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>    /**<a name="line.739"></a>
-<span class="sourceLineNo">740</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.740"></a>
-<span class="sourceLineNo">741</span>     * We write it as a protobuf.<a name="line.741"></a>
-<span class="sourceLineNo">742</span>     * @param out<a name="line.742"></a>
-<span class="sourceLineNo">743</span>     * @throws IOException<a name="line.743"></a>
-<span class="sourceLineNo">744</span>     * @see #read(DataInputStream)<a name="line.744"></a>
-<span class="sourceLineNo">745</span>     */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    void write(final DataOutputStream out) throws IOException {<a name="line.746"></a>
-<span class="sourceLineNo">747</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.749"></a>
-<span class="sourceLineNo">750</span>        bbpBuilder.setFirst(ByteStringer.wrap(e.getKey()));<a name="line.750"></a>
-<span class="sourceLineNo">751</span>        bbpBuilder.setSecond(ByteStringer.wrap(e.getValue()));<a name="line.751"></a>
-<span class="sourceLineNo">752</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      }<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      builder.build().writeDelimitedTo(out);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    /**<a name="line.758"></a>
-<span class="sourceLineNo">759</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.759"></a>
-<span class="sourceLineNo">760</span>     * Can deserialize protobuf of old Writables format.<a name="line.760"></a>
-<span class="sourceLineNo">761</span>     * @param in<a name="line.761"></a>
-<span class="sourceLineNo">762</span>     * @throws IOException<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * @see #write(DataOutputStream)<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    void read(final DataInputStream in) throws IOException {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      byte [] pbuf = new byte[pblen];<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (in.markSupported()) in.mark(pblen);<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      int read = in.read(pbuf);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      } else {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>        if (in.markSupported()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>          in.reset();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>          parseWritable(in);<a name="line.777"></a>
-<span class="sourceLineNo">778</span>        } else {<a name="line.778"></a>
-<span class="sourceLineNo">779</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.779"></a>
-<span class="sourceLineNo">780</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.781"></a>
-<span class="sourceLineNo">782</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.782"></a>
-<span class="sourceLineNo">783</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.783"></a>
-<span class="sourceLineNo">784</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.784"></a>
-<span class="sourceLineNo">785</span>          parseWritable(new DataInputStream(sis));<a name="line.785"></a>
-<span class="sourceLineNo">786</span>        }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>      }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    }<a name="line.788"></a>
-<span class="sourceLineNo">789</span><a name="line.789"></a>
-<span class="sourceLineNo">790</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.790"></a>
-<span class="sourceLineNo">791</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.791"></a>
-<span class="sourceLineNo">792</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.792"></a>
-<span class="sourceLineNo">793</span>     * @throws IOException<a name="line.793"></a>
-<span class="sourceLineNo">794</span>     */<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      this.map.clear();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>      // Read the number of entries in the map<a name="line.798"></a>
-<span class="sourceLineNo">799</span>      int entries = in.readInt();<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      // Then read each key/value pair<a name="line.800"></a>
-<span class="sourceLineNo">801</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        byte [] key = Bytes.readByteArray(in);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        in.readByte();<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        byte [] value = Bytes.readByteArray(in);<a name="line.805"></a>
-<span class="sourceLineNo">806</span>        this.map.put(key, value);<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      }<a name="line.807"></a>
-<span class="sourceLineNo">808</span>    }<a name="line.808"></a>
-<span class="sourceLineNo">809</span><a name="line.809"></a>
-<span class="sourceLineNo">810</span>    /**<a name="line.810"></a>
-<span class="sourceLineNo">811</span>     * Fill our map with content of the pb we read off disk<a name="line.811"></a>
-<span class="sourceLineNo">812</span>     * @param fip protobuf message to read<a name="line.812"></a>
-<span class="sourceLineNo">813</span>     */<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      this.map.clear();<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    }<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.822"></a>
-<span class="sourceLineNo">823</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.824"></a>
-<span class="sourceLineNo">825</span>  }<a name="line.825"></a>
-<span class="sourceLineNo">826</span><a name="line.826"></a>
-<span class="sourceLineNo">827</span>  /**<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * HFile.Writer.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   *<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * @return Array of strings, each represents a supported compression<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   *         supported.<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   *         &lt;ul&gt;<a name="line.834"></a>
-<span class="sourceLineNo">835</span>   *         &lt;li&gt;"none" - No compression.<a name="line.835"></a>
-<span class="sourceLineNo">836</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.836"></a>
-<span class="sourceLineNo">837</span>   *         &lt;/ul&gt;<a name="line.837"></a>
-<span class="sourceLineNo">838</span>   */<a name="line.838"></a>
-<span class="sourceLineNo">839</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    return Compression.getSupportedAlgorithms();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
-<span class="sourceLineNo">842</span><a name="line.842"></a>
-<span class="sourceLineNo">843</span>  // Utility methods.<a name="line.843"></a>
-<span class="sourceLineNo">844</span>  /*<a name="line.844"></a>
-<span class="sourceLineNo">845</span>   * @param l Long to convert to an int.<a name="line.845"></a>
-<span class="sourceLineNo">846</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.846"></a>
-<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
-<span class="sourceLineNo">848</span>  static int longToInt(final long l) {<a name="line.848"></a>
-<span class="sourceLineNo">849</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.851"></a>
-<span class="sourceLineNo">852</span>  }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>  /**<a name="line.854"></a>
-<span class="sourceLineNo">855</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.855"></a>
-<span class="sourceLineNo">856</span>   * empty list.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>   *<a name="line.857"></a>
-<span class="sourceLineNo">858</span>   * @param fs  The file system reference.<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * @param regionDir  The region directory to scan.<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * @return The list of files found.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * @throws IOException When scanning the files fails.<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   */<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      throws IOException {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;Path&gt;();<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    for(FileStatus dir : familyDirs) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.869"></a>
-<span class="sourceLineNo">870</span>      for (FileStatus file : files) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        if (!file.isDirectory() &amp;&amp;<a name="line.871"></a>
-<span class="sourceLineNo">872</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          regionHFiles.add(file.getPath());<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>      }<a name="line.876"></a>
-<span class="sourceLineNo">877</span>    }<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    return regionHFiles;<a name="line.878"></a>
-<span class="sourceLineNo">879</span>  }<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>  /**<a name="line.881"></a>
-<span class="sourceLineNo">882</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.882"></a>
-<span class="sourceLineNo">883</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.883"></a>
-<span class="sourceLineNo">884</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.884"></a>
-<span class="sourceLineNo">885</span>   * indicate that this is not a software error, but corrupted input.<a name="line.885"></a>
-<span class="sourceLineNo">886</span>   *<a name="line.886"></a>
-<span class="sourceLineNo">887</span>   * @param version an HFile version<a name="line.887"></a>
-<span class="sourceLineNo">888</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.888"></a>
-<span class="sourceLineNo">889</span>   */<a name="line.889"></a>
-<span class="sourceLineNo">890</span>  public static void checkFormatVersion(int version)<a name="line.890"></a>
-<span class="sourceLineNo">891</span>      throws IllegalArgumentException {<a name="line.891"></a>
-<span class="sourceLineNo">892</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.894"></a>
-<span class="sourceLineNo">895</span>          + MAX_FORMAT_VERSION + ")");<a name="line.895"></a>
-<span class="sourceLineNo">896</span>    }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>  }<a name="line.897"></a>
-<span class="sourceLineNo">898</span><a name="line.898"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>    public Writer create() throws IOException {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.303"></a>
+<span class="sourceLineNo">304</span>            "filesystem/path or path");<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      if (path != null) {<a name="line.306"></a>
+<span class="sourceLineNo">307</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        try {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.309"></a>
+<span class="sourceLineNo">310</span>        } catch (UnsupportedOperationException uoe) {<a name="line.310"></a>
+<span class="sourceLineNo">311</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        }<a name="line.313"></a>
+<span class="sourceLineNo">314</span>      }<a name="line.314"></a>
+<span class="sourceLineNo">315</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    }<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  /** The configuration key for HFile version to use for new files */<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  public static int getFormatVersion(Configuration conf) {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    checkFormatVersion(version);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return version;<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * Disables block cache access for all writers created through the<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * returned factory.<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   */<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.333"></a>
+<span class="sourceLineNo">334</span>       conf) {<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    Configuration tempConf = new Configuration(conf);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  }<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  /**<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.343"></a>
+<span class="sourceLineNo">344</span>      CacheConfig cacheConf) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    int version = getFormatVersion(conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    switch (version) {<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    case 2:<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.350"></a>
+<span class="sourceLineNo">351</span>        "in hbase-site.xml)");<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    case 3:<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    default:<a name="line.354"></a>
+<span class="sourceLineNo">355</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.355"></a>
+<span class="sourceLineNo">356</span>          "format version " + version);<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  /**<a name="line.360"></a>
+<span class="sourceLineNo">361</span>   * An abstraction used by the block index.<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public interface CachingBlockReader {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>    /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>     * Read in a file block.<a name="line.367"></a>
+<span class="sourceLineNo">368</span>     * @param offset offset to read.<a name="line.368"></a>
+<span class="sourceLineNo">369</span>     * @param onDiskBlockSize size of the block<a name="line.369"></a>
+<span class="sourceLineNo">370</span>     * @param cacheBlock<a name="line.370"></a>
+<span class="sourceLineNo">371</span>     * @param pread<a name="line.371"></a>
+<span class="sourceLineNo">372</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.372"></a>
+<span class="

<TRUNCATED>
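
For context, here is a minimal usage sketch of the HFile.WriterFactory builder API shown in the source listing above. It is not part of the commit; names such as HFileContextBuilder, KeyValue, the block size, and the output path are assumptions about the surrounding HBase API rather than anything taken from this diff. Note that getWriterFactory() consults hfile.format.version and, as the listing shows, refuses to create a writer for v2 files.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class HFileWriteExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path("/tmp/example.hfile");      // hypothetical output path
    HFileContext context = new HFileContextBuilder()
        .withBlockSize(64 * 1024)                    // assumed 64 KB block size
        .build();

    // Exactly one of withPath()/withOutputStream() must be supplied before create().
    HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
        .withPath(fs, path)
        .withFileContext(context)
        .create();
    try {
      // Cells must be appended in key order.
      writer.append(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
          Bytes.toBytes("q"), Bytes.toBytes("value1")));
      writer.appendFileInfo(Bytes.toBytes("example.meta"), Bytes.toBytes("v"));
    } finally {
      writer.close();
    }
  }
}

If block caching should be bypassed entirely (for example in a bulk-load style tool), getWriterFactoryNoCache(conf) from the same listing can be used in place of getWriterFactory(conf, cacheConf).
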

[08/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
index 8fd15a0..da22771 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
@@ -186,741 +186,742 @@
 <span class="sourceLineNo">178</span>   * The number of bytes per checksum.<a name="line.178"></a>
 <span class="sourceLineNo">179</span>   */<a name="line.179"></a>
 <span class="sourceLineNo">180</span>  public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  // For measuring number of checksum failures<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  static final Counter checksumFailures = new Counter();<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  // for test purpose<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  public static final Counter dataBlockReadCnt = new Counter();<a name="line.185"></a>
-<span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /**<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * Number of checksum verification failures. It also<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * clears the counter.<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  public static final long getChecksumFailuresCount() {<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    long count = checksumFailures.get();<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    checksumFailures.set(0);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    return count;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /** API required to write an {@link HFile} */<a name="line.197"></a>
-<span class="sourceLineNo">198</span>  public interface Writer extends Closeable {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>    /** Add an element to the file info map. */<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>    void append(Cell cell) throws IOException;<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>    /** @return the path to this {@link HFile} */<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    Path getPath();<a name="line.208"></a>
-<span class="sourceLineNo">209</span><a name="line.209"></a>
-<span class="sourceLineNo">210</span>    /**<a name="line.210"></a>
-<span class="sourceLineNo">211</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.211"></a>
-<span class="sourceLineNo">212</span>     * a compound Bloom filter writer.<a name="line.212"></a>
-<span class="sourceLineNo">213</span>     */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    /**<a name="line.223"></a>
-<span class="sourceLineNo">224</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.224"></a>
-<span class="sourceLineNo">225</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.225"></a>
-<span class="sourceLineNo">226</span>     * in HFile version 1 and version 2.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>     */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>    /**<a name="line.230"></a>
-<span class="sourceLineNo">231</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.231"></a>
-<span class="sourceLineNo">232</span>     * HFile V2.<a name="line.232"></a>
-<span class="sourceLineNo">233</span>     */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>    /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>     * Return the file context for the HFile this writer belongs to<a name="line.237"></a>
-<span class="sourceLineNo">238</span>     */<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    HFileContext getFileContext();<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * we want to be able to swap writer implementations.<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  public static class WriterFactory {<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    protected final Configuration conf;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    protected final CacheConfig cacheConf;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    protected FileSystem fs;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    protected Path path;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    protected FSDataOutputStream ostream;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    protected CellComparator comparator = <a name="line.252"></a>
-<span class="sourceLineNo">253</span>        CellComparator.COMPARATOR;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    protected InetSocketAddress[] favoredNodes;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    private HFileContext fileContext;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    protected boolean shouldDropBehind = false;<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      this.conf = conf;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      this.cacheConf = cacheConf;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span><a name="line.262"></a>
-<span class="sourceLineNo">263</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      Preconditions.checkNotNull(fs);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(path);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      this.fs = fs;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      this.path = path;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return this;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      Preconditions.checkNotNull(ostream);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      this.ostream = ostream;<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return this;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      Preconditions.checkNotNull(comparator);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      this.comparator = comparator;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      return this;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    }<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      // Deliberately not checking for null here.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      this.favoredNodes = favoredNodes;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return this;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      this.fileContext = fileContext;<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      return this;<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>      return this;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>  // For measuring number of checksum failures<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  static final Counter CHECKSUM_FAILURES = new Counter();<a name="line.183"></a>
+<span class="sourceLineNo">184</span><a name="line.184"></a>
+<span class="sourceLineNo">185</span>  // For tests. Gets incremented when we read a block whether from HDFS or from Cache.<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  public static final Counter DATABLOCK_READ_COUNT = new Counter();<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /**<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * Number of checksum verification failures. It also<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   * clears the counter.<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   */<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public static final long getChecksumFailuresCount() {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    long count = CHECKSUM_FAILURES.get();<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    CHECKSUM_FAILURES.set(0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    return count;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>  /** API required to write an {@link HFile} */<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  public interface Writer extends Closeable {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>    /** Add an element to the file info map. */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>    void append(Cell cell) throws IOException;<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>    /** @return the path to this {@link HFile} */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    Path getPath();<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>    /**<a name="line.211"></a>
+<span class="sourceLineNo">212</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.212"></a>
+<span class="sourceLineNo">213</span>     * a compound Bloom filter writer.<a name="line.213"></a>
+<span class="sourceLineNo">214</span>     */<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.215"></a>
+<span class="sourceLineNo">216</span><a name="line.216"></a>
+<span class="sourceLineNo">217</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>    /**<a name="line.224"></a>
+<span class="sourceLineNo">225</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.225"></a>
+<span class="sourceLineNo">226</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.226"></a>
+<span class="sourceLineNo">227</span>     * in HFile version 1 and version 2.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>     */<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    /**<a name="line.231"></a>
+<span class="sourceLineNo">232</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.232"></a>
+<span class="sourceLineNo">233</span>     * HFile V2.<a name="line.233"></a>
+<span class="sourceLineNo">234</span>     */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    /**<a name="line.237"></a>
+<span class="sourceLineNo">238</span>     * Return the file context for the HFile this writer belongs to<a name="line.238"></a>
+<span class="sourceLineNo">239</span>     */<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    HFileContext getFileContext();<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /**<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * we want to be able to swap writer implementations.<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  public static class WriterFactory {<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    protected final Configuration conf;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    protected final CacheConfig cacheConf;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    protected FileSystem fs;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    protected Path path;<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    protected FSDataOutputStream ostream;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    protected CellComparator comparator = <a name="line.253"></a>
+<span class="sourceLineNo">254</span>        CellComparator.COMPARATOR;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    protected InetSocketAddress[] favoredNodes;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    private HFileContext fileContext;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    protected boolean shouldDropBehind = false;<a name="line.257"></a>
+<span class="sourceLineNo">258</span><a name="line.258"></a>
+<span class="sourceLineNo">259</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      this.conf = conf;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      this.cacheConf = cacheConf;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(fs);<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      Preconditions.checkNotNull(path);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      this.fs = fs;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      this.path = path;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      return this;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    }<a name="line.270"></a>
+<span class="sourceLineNo">271</span><a name="line.271"></a>
+<span class="sourceLineNo">272</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      Preconditions.checkNotNull(ostream);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>      this.ostream = ostream;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      return this;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    }<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      Preconditions.checkNotNull(comparator);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      this.comparator = comparator;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      return this;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      // Deliberately not checking for null here.<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      this.favoredNodes = favoredNodes;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      return this;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      this.fileContext = fileContext;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      return this;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      return this;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    }<a name="line.298"></a>
 <span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    public Writer create() throws IOException {<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.302"></a>
-<span class="sourceLineNo">303</span>            "filesystem/path or path");<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      if (path != null) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        try {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        } catch (UnsupportedOperationException uoe) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  }<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  /** The configuration key for HFile version to use for new files */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static int getFormatVersion(Configuration conf) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    checkFormatVersion(version);<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    return version;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  }<a name="line.325"></a>
-<span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  /**<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * Disables block cache access for all writers created through the<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * returned factory.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.332"></a>
-<span class="sourceLineNo">333</span>       conf) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Configuration tempConf = new Configuration(conf);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  }<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  /**<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      CacheConfig cacheConf) {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    int version = getFormatVersion(conf);<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    switch (version) {<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    case 2:<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.347"></a>
-<span class="sourceLineNo">348</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        "in hbase-site.xml)");<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    case 3:<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    default:<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.354"></a>
-<span class="sourceLineNo">355</span>          "format version " + version);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    }<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  /**<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * An abstraction used by the block index.<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  public interface CachingBlockReader {<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    /**<a name="line.365"></a>
-<span class="sourceLineNo">366</span>     * Read in a file block.<a name="line.366"></a>
-<span class="sourceLineNo">367</span>     * @param offset offset to read.<a name="line.367"></a>
-<span class="sourceLineNo">368</span>     * @param onDiskBlockSize size of the block<a name="line.368"></a>
-<span class="sourceLineNo">369</span>     * @param cacheBlock<a name="line.369"></a>
-<span class="sourceLineNo">370</span>     * @param pread<a name="line.370"></a>
-<span class="sourceLineNo">371</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.371"></a>
-<span class="sourceLineNo">372</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.372"></a>
-<span class="sourceLineNo">373</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.373"></a>
-<span class="sourceLineNo">374</span>     *  caching efficiency of encoded data blocks)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.375"></a>
-<span class="sourceLineNo">376</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.376"></a>
-<span class="sourceLineNo">377</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.377"></a>
-<span class="sourceLineNo">378</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>     * @return Block wrapped in a ByteBuffer.<a name="line.379"></a>
-<span class="sourceLineNo">380</span>     * @throws IOException<a name="line.380"></a>
-<span class="sourceLineNo">381</span>     */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.382"></a>
-<span class="sourceLineNo">383</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.385"></a>
-<span class="sourceLineNo">386</span>        throws IOException;<a name="line.386"></a>
-<span class="sourceLineNo">387</span><a name="line.387"></a>
-<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
-<span class="sourceLineNo">389</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.389"></a>
-<span class="sourceLineNo">390</span>     * @param block Block to be returned.<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     */<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    void returnBlock(HFileBlock block);<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  }<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    /**<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * write.<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     */<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    String getName();<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>    CellComparator getComparator();<a name="line.404"></a>
-<span class="sourceLineNo">405</span><a name="line.405"></a>
-<span class="sourceLineNo">406</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    Cell getLastKey();<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>    Cell midkey() throws IOException;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>    long length();<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>    long getEntries();<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>    Cell getFirstKey();<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    long indexSize();<a name="line.422"></a>
-<span class="sourceLineNo">423</span><a name="line.423"></a>
-<span class="sourceLineNo">424</span>    byte[] getFirstRowKey();<a name="line.424"></a>
-<span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>    byte[] getLastRowKey();<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    FixedFileTrailer getTrailer();<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>    /**<a name="line.436"></a>
-<span class="sourceLineNo">437</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.437"></a>
-<span class="sourceLineNo">438</span>     * {@link HFile} version.<a name="line.438"></a>
-<span class="sourceLineNo">439</span>     * Knows nothing about how that metadata is structured.<a name="line.439"></a>
-<span class="sourceLineNo">440</span>     */<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * {@link HFile}  version.<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * Knows nothing about how that metadata is structured.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     */<a name="line.447"></a>
-<span class="sourceLineNo">448</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.448"></a>
-<span class="sourceLineNo">449</span><a name="line.449"></a>
-<span class="sourceLineNo">450</span>    Path getPath();<a name="line.450"></a>
-<span class="sourceLineNo">451</span><a name="line.451"></a>
-<span class="sourceLineNo">452</span>    /** Close method with optional evictOnClose */<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    void close(boolean evictOnClose) throws IOException;<a name="line.453"></a>
-<span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.455"></a>
-<span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    boolean hasMVCCInfo();<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>     * Return the file context of the HFile this reader belongs to<a name="line.460"></a>
-<span class="sourceLineNo">461</span>     */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    HFileContext getFileContext();<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    <a name="line.463"></a>
-<span class="sourceLineNo">464</span>    boolean isPrimaryReplicaReader();<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    <a name="line.465"></a>
-<span class="sourceLineNo">466</span>    void setPrimaryReplicaReader(boolean isPrimaryReplicaReader);<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    boolean shouldIncludeMemstoreTS();<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>    boolean isDecodeMemstoreTS();<a name="line.470"></a>
-<span class="sourceLineNo">471</span><a name="line.471"></a>
-<span class="sourceLineNo">472</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>    @VisibleForTesting<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    @VisibleForTesting<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    boolean prefetchComplete();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>  }<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  /**<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * Method returns the reader given the specified arguments.<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   *<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * @param path hfile's path<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * @param fsdis stream of path's file<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * @param size max size of the trailer.<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * @param hfs<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * @return an appropriate instance of HFileReader<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   */<a name="line.492"></a>
-<span class="sourceLineNo">493</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      justification="Intentional")<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  private static Reader pickReaderVersion(Path path, FSDataInputStreamWrapper fsdis,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      long size, CacheConfig cacheConf, HFileSystem hfs, Configuration conf) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FixedFileTrailer trailer = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    try {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      switch (trailer.getMajorVersion()) {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      case 2:<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        LOG.debug("Opening HFile v2 with v3 reader");<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      case 3 :<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs, conf);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      default:<a name="line.508"></a>
-<span class="sourceLineNo">509</span>        throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (Throwable t) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      try {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        fsdis.close();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      } catch (Throwable t2) {<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper", t2);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    }<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  /**<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   * @param fs A file system<a name="line.522"></a>
-<span class="sourceLineNo">523</span>   * @param path Path to HFile<a name="line.523"></a>
-<span class="sourceLineNo">524</span>   * @param fsdis a stream of path's file<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * @param size max size of the trailer.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.526"></a>
-<span class="sourceLineNo">527</span>   * @param conf Configuration<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * @return A version specific Hfile Reader<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  @SuppressWarnings("resource")<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  public static Reader createReader(FileSystem fs, Path path,<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      FSDataInputStreamWrapper fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      throws IOException {<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    HFileSystem hfs = null;<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    // the filesystem.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    if (!(fs instanceof HFileSystem)) {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      hfs = new HFileSystem(fs);<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } else {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      hfs = (HFileSystem)fs;<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    return pickReaderVersion(path, fsdis, size, cacheConf, hfs, conf);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   *<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * @param fs filesystem<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * @param path Path to file to read<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * @param cacheConf This must not be null.  @see {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * @return an active Reader instance<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   */<a name="line.556"></a>
-<span class="sourceLineNo">557</span>  public static Reader createReader(<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      FileSystem fs, Path path, CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    return pickReaderVersion(path, stream, fs.getFileStatus(path).getLen(),<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      cacheConf, stream.getHfs(), conf);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>  }<a name="line.563"></a>
-<span class="sourceLineNo">564</span><a name="line.564"></a>
-<span class="sourceLineNo">565</span>  /**<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * This factory method is used only by unit tests<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  static Reader createReaderFromStream(Path path,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      FSDataInputStream fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    return pickReaderVersion(path, wrapper, size, cacheConf, null, conf);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  /**<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   * @param fs filesystem<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   * @param path Path to file to verify<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.579"></a>
-<span class="sourceLineNo">580</span>   * @throws IOException if failed to read from the underlying stream<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   */<a name="line.581"></a>
-<span class="sourceLineNo">582</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>  /**<a name="line.586"></a>
-<span class="sourceLineNo">587</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.587"></a>
-<span class="sourceLineNo">588</span>   * @param fs filesystem<a name="line.588"></a>
-<span class="sourceLineNo">589</span>   * @param fileStatus the file to verify<a name="line.589"></a>
-<span class="sourceLineNo">590</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * @throws IOException if failed to read from the underlying stream<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   */<a name="line.592"></a>
-<span class="sourceLineNo">593</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      throws IOException {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    final Path path = fileStatus.getPath();<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    final long size = fileStatus.getLen();<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    try {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.599"></a>
-<span class="sourceLineNo">600</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      return true;<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    } catch (IllegalArgumentException e) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      return false;<a name="line.604"></a>
-<span class="sourceLineNo">605</span>    } catch (IOException e) {<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      throw e;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } finally {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      try {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        fsdis.close();<a name="line.609"></a>
-<span class="sourceLineNo">610</span>      } catch (Throwable t) {<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper: " + path, t);<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      }<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>  }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>  /**<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;byte [], byte []&gt;(Bytes.BYTES_COMPARATOR);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    public FileInfo() {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>      super();<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    }<a name="line.633"></a>
-<span class="sourceLineNo">634</span><a name="line.634"></a>
-<span class="sourceLineNo">635</span>    /**<a name="line.635"></a>
-<span class="sourceLineNo">636</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.636"></a>
-<span class="sourceLineNo">637</span>     * key prefix.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>     *<a name="line.638"></a>
-<span class="sourceLineNo">639</span>     * @param k key to add<a name="line.639"></a>
-<span class="sourceLineNo">640</span>     * @param v value to add<a name="line.640"></a>
-<span class="sourceLineNo">641</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.641"></a>
-<span class="sourceLineNo">642</span>     *          with the reserved prefix<a name="line.642"></a>
-<span class="sourceLineNo">643</span>     * @return this file info object<a name="line.643"></a>
-<span class="sourceLineNo">644</span>     * @throws IOException if the key or value is invalid<a name="line.644"></a>
-<span class="sourceLineNo">645</span>     */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        final boolean checkPrefix) throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      if (k == null || v == null) {<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        throw new NullPointerException("Key nor value may be null");<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      }<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.651"></a>
-<span class="sourceLineNo">652</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.652"></a>
-<span class="sourceLineNo">653</span>            + " are reserved");<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      put(k, v);<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return this;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span><a name="line.658"></a>
-<span class="sourceLineNo">659</span>    public void clear() {<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      this.map.clear();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span><a name="line.662"></a>
-<span class="sourceLineNo">663</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>      return map.comparator();<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    }<a name="line.665"></a>
-<span class="sourceLineNo">666</span><a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public boolean containsKey(Object key) {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      return map.containsKey(key);<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    }<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    public boolean containsValue(Object value) {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      return map.containsValue(value);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    }<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      return map.entrySet();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    public boolean equals(Object o) {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      return map.equals(o);<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span><a name="line.682"></a>
-<span class="sourceLineNo">683</span>    public byte[] firstKey() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      return map.firstKey();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    }<a name="line.685"></a>
-<span class="sourceLineNo">686</span><a name="line.686"></a>
-<span class="sourceLineNo">687</span>    public byte[] get(Object key) {<a name="line.687"></a>
-<span class="sourceLineNo">688</span>      return map.get(key);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>    }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>    public int hashCode() {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      return map.hashCode();<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span><a name="line.694"></a>
-<span class="sourceLineNo">695</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>      return this.map.headMap(toKey);<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    }<a name="line.697"></a>
-<span class="sourceLineNo">698</span><a name="line.698"></a>
-<span class="sourceLineNo">699</span>    public boolean isEmpty() {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return map.isEmpty();<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      return map.keySet();<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>    public byte[] lastKey() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>      return map.lastKey();<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    }<a name="line.709"></a>
-<span class="sourceLineNo">710</span><a name="line.710"></a>
-<span class="sourceLineNo">711</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      return this.map.put(key, value);<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    }<a name="line.713"></a>
-<span class="sourceLineNo">714</span><a name="line.714"></a>
-<span class="sourceLineNo">715</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.map.putAll(m);<a name="line.716"></a>
-<span class="sourceLineNo">717</span>    }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public byte[] remove(Object key) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return this.map.remove(key);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    public int size() {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>      return map.size();<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    }<a name="line.725"></a>
-<span class="sourceLineNo">726</span><a name="line.726"></a>
-<span class="sourceLineNo">727</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return this.map.subMap(fromKey, toKey);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.731"></a>
-<span class="sourceLineNo">732</span>      return this.map.tailMap(fromKey);<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    }<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>    public Collection&lt;byte[]&gt; values() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      return map.values();<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>    /**<a name="line.739"></a>
-<span class="sourceLineNo">740</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.740"></a>
-<span class="sourceLineNo">741</span>     * We write it as a protobuf.<a name="line.741"></a>
-<span class="sourceLineNo">742</span>     * @param out<a name="line.742"></a>
-<span class="sourceLineNo">743</span>     * @throws IOException<a name="line.743"></a>
-<span class="sourceLineNo">744</span>     * @see #read(DataInputStream)<a name="line.744"></a>
-<span class="sourceLineNo">745</span>     */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    void write(final DataOutputStream out) throws IOException {<a name="line.746"></a>
-<span class="sourceLineNo">747</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.749"></a>
-<span class="sourceLineNo">750</span>        bbpBuilder.setFirst(ByteStringer.wrap(e.getKey()));<a name="line.750"></a>
-<span class="sourceLineNo">751</span>        bbpBuilder.setSecond(ByteStringer.wrap(e.getValue()));<a name="line.751"></a>
-<span class="sourceLineNo">752</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      }<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      builder.build().writeDelimitedTo(out);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    /**<a name="line.758"></a>
-<span class="sourceLineNo">759</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.759"></a>
-<span class="sourceLineNo">760</span>     * Can deserialize protobuf of old Writables format.<a name="line.760"></a>
-<span class="sourceLineNo">761</span>     * @param in<a name="line.761"></a>
-<span class="sourceLineNo">762</span>     * @throws IOException<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * @see #write(DataOutputStream)<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    void read(final DataInputStream in) throws IOException {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      byte [] pbuf = new byte[pblen];<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (in.markSupported()) in.mark(pblen);<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      int read = in.read(pbuf);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      } else {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>        if (in.markSupported()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>          in.reset();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>          parseWritable(in);<a name="line.777"></a>
-<span class="sourceLineNo">778</span>        } else {<a name="line.778"></a>
-<span class="sourceLineNo">779</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.779"></a>
-<span class="sourceLineNo">780</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.781"></a>
-<span class="sourceLineNo">782</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.782"></a>
-<span class="sourceLineNo">783</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.783"></a>
-<span class="sourceLineNo">784</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.784"></a>
-<span class="sourceLineNo">785</span>          parseWritable(new DataInputStream(sis));<a name="line.785"></a>
-<span class="sourceLineNo">786</span>        }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>      }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    }<a name="line.788"></a>
-<span class="sourceLineNo">789</span><a name="line.789"></a>
-<span class="sourceLineNo">790</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.790"></a>
-<span class="sourceLineNo">791</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.791"></a>
-<span class="sourceLineNo">792</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.792"></a>
-<span class="sourceLineNo">793</span>     * @throws IOException<a name="line.793"></a>
-<span class="sourceLineNo">794</span>     */<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      this.map.clear();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>      // Read the number of entries in the map<a name="line.798"></a>
-<span class="sourceLineNo">799</span>      int entries = in.readInt();<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      // Then read each key/value pair<a name="line.800"></a>
-<span class="sourceLineNo">801</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        byte [] key = Bytes.readByteArray(in);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        in.readByte();<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        byte [] value = Bytes.readByteArray(in);<a name="line.805"></a>
-<span class="sourceLineNo">806</span>        this.map.put(key, value);<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      }<a name="line.807"></a>
-<span class="sourceLineNo">808</span>    }<a name="line.808"></a>
-<span class="sourceLineNo">809</span><a name="line.809"></a>
-<span class="sourceLineNo">810</span>    /**<a name="line.810"></a>
-<span class="sourceLineNo">811</span>     * Fill our map with content of the pb we read off disk<a name="line.811"></a>
-<span class="sourceLineNo">812</span>     * @param fip protobuf message to read<a name="line.812"></a>
-<span class="sourceLineNo">813</span>     */<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      this.map.clear();<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    }<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.822"></a>
-<span class="sourceLineNo">823</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.824"></a>
-<span class="sourceLineNo">825</span>  }<a name="line.825"></a>
-<span class="sourceLineNo">826</span><a name="line.826"></a>
-<span class="sourceLineNo">827</span>  /**<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * HFile.Writer.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   *<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * @return Array of strings, each represents a supported compression<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   *         supported.<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   *         &lt;ul&gt;<a name="line.834"></a>
-<span class="sourceLineNo">835</span>   *         &lt;li&gt;"none" - No compression.<a name="line.835"></a>
-<span class="sourceLineNo">836</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.836"></a>
-<span class="sourceLineNo">837</span>   *         &lt;/ul&gt;<a name="line.837"></a>
-<span class="sourceLineNo">838</span>   */<a name="line.838"></a>
-<span class="sourceLineNo">839</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    return Compression.getSupportedAlgorithms();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
-<span class="sourceLineNo">842</span><a name="line.842"></a>
-<span class="sourceLineNo">843</span>  // Utility methods.<a name="line.843"></a>
-<span class="sourceLineNo">844</span>  /*<a name="line.844"></a>
-<span class="sourceLineNo">845</span>   * @param l Long to convert to an int.<a name="line.845"></a>
-<span class="sourceLineNo">846</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.846"></a>
-<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
-<span class="sourceLineNo">848</span>  static int longToInt(final long l) {<a name="line.848"></a>
-<span class="sourceLineNo">849</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.851"></a>
-<span class="sourceLineNo">852</span>  }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>  /**<a name="line.854"></a>
-<span class="sourceLineNo">855</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.855"></a>
-<span class="sourceLineNo">856</span>   * empty list.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>   *<a name="line.857"></a>
-<span class="sourceLineNo">858</span>   * @param fs  The file system reference.<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * @param regionDir  The region directory to scan.<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * @return The list of files found.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * @throws IOException When scanning the files fails.<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   */<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      throws IOException {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;Path&gt;();<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    for(FileStatus dir : familyDirs) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.869"></a>
-<span class="sourceLineNo">870</span>      for (FileStatus file : files) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        if (!file.isDirectory() &amp;&amp;<a name="line.871"></a>
-<span class="sourceLineNo">872</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          regionHFiles.add(file.getPath());<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>      }<a name="line.876"></a>
-<span class="sourceLineNo">877</span>    }<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    return regionHFiles;<a name="line.878"></a>
-<span class="sourceLineNo">879</span>  }<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>  /**<a name="line.881"></a>
-<span class="sourceLineNo">882</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.882"></a>
-<span class="sourceLineNo">883</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.883"></a>
-<span class="sourceLineNo">884</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.884"></a>
-<span class="sourceLineNo">885</span>   * indicate that this is not a software error, but corrupted input.<a name="line.885"></a>
-<span class="sourceLineNo">886</span>   *<a name="line.886"></a>
-<span class="sourceLineNo">887</span>   * @param version an HFile version<a name="line.887"></a>
-<span class="sourceLineNo">888</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.888"></a>
-<span class="sourceLineNo">889</span>   */<a name="line.889"></a>
-<span class="sourceLineNo">890</span>  public static void checkFormatVersion(int version)<a name="line.890"></a>
-<span class="sourceLineNo">891</span>      throws IllegalArgumentException {<a name="line.891"></a>
-<span class="sourceLineNo">892</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.894"></a>
-<span class="sourceLineNo">895</span>          + MAX_FORMAT_VERSION + ")");<a name="line.895"></a>
-<span class="sourceLineNo">896</span>    }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>  }<a name="line.897"></a>
-<span class="sourceLineNo">898</span><a name="line.898"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>    public Writer create() throws IOException {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.303"></a>
+<span class="sourceLineNo">304</span>            "filesystem/path or path");<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      if (path != null) {<a name="line.306"></a>
+<span class="sourceLineNo">307</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        try {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.309"></a>
+<span class="sourceLineNo">310</span>        } catch (UnsupportedOperationException uoe) {<a name="line.310"></a>
+<span class="sourceLineNo">311</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        }<a name="line.313"></a>
+<span class="sourceLineNo">314</span>      }<a name="line.314"></a>
+<span class="sourceLineNo">315</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    }<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  /** The configuration key for HFile version to use for new files */<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  public static int getFormatVersion(Configuration conf) {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    checkFormatVersion(version);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return version;<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * Disables block cache access for all writers created through the<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * returned factory.<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   */<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.333"></a>
+<span class="sourceLineNo">334</span>       conf) {<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    Configuration tempConf = new Configuration(conf);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  }<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  /**<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.343"></a>
+<span class="sourceLineNo">344</span>      CacheConfig cacheConf) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    int version = getFormatVersion(conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    switch (version) {<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    case 2:<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.350"></a>
+<span class="sourceLineNo">351</span>        "in hbase-site.xml)");<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    case 3:<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    default:<a name="line.354"></a>
+<span class="sourceLineNo">355</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.355"></a>
+<span class="sourceLineNo">356</span>          "format version " + version);<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  /**<a name="line.360"></a>
+<span class="sourceLineNo">361</span>   * An abstraction used by the block index.<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public interface CachingBlockReader {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>    /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>     * Read in a file block.<a name="line.367"></a>
+<span class="sourceLineNo">368</span>     * @param offset offset to read.<a name="line.368"></a>
+<span class="sourceLineNo">369</span>     * @param onDiskBlockSize size of the block<a name="line.369"></a>
+<span class="sourceLineNo">370</span>     * @param cacheBlock<a name="line.370"></a>
+<span class="sourceLineNo">371</span>     * @param pread<a name="line.371"></a>
+<span class="sourceLineNo">372</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.372"></a>
+<span class="sourceLineNo">373</span>     * @param expectedBlockType the 

<TRUNCATED>
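
For orientation: the truncated listing above ends inside HFile's writer-factory plumbing (WriterFactory.create(), getFormatVersion(), getWriterFactory()/getWriterFactoryNoCache(), and the CachingBlockReader interface). A minimal sketch of how that factory is typically driven follows; the withPath/withFileContext builder methods and HFileContextBuilder are assumed from the wider HFile API and are not visible in the hunk above, so treat this as an illustration rather than the exact surface.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.io.hfile.CacheConfig;
  import org.apache.hadoop.hbase.io.hfile.HFile;
  import org.apache.hadoop.hbase.io.hfile.HFileContext;
  import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

  public class HFileWriteSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      FileSystem fs = FileSystem.get(conf);
      Path path = new Path("/tmp/example.hfile");   // hypothetical output path
      // getWriterFactory() consults hfile.format.version and refuses versions it cannot
      // write, as the switch above shows; getWriterFactoryNoCache(conf) would skip
      // block-cache use for the writer.
      HFileContext context = new HFileContextBuilder().withBlockSize(64 * 1024).build();
      HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
          .withPath(fs, path)               // assumed builder method
          .withFileContext(context)         // assumed builder method
          .create();
      try {
        // writer.append(cell) calls would go here, in sorted order.
      } finally {
        writer.close();
      }
    }
  }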

[38/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
index 77fe3eb..c36e999 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
@@ -177,8 +177,8 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <tr class="rowColor">
 <td class="colFirst"><code>static <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#BUCKET_CACHE_PERSISTENT_PATH_KEY">BUCKET_CACHE_PERSISTENT_PATH_KEY</a></strong></code>
-<div class="block">If the chosen ioengine can persist its state across restarts, the path to the file to
- persist to.</div>
+<div class="block">If the chosen ioengine can persist its state across restarts, the path to the file to persist
+ to.</div>
 </td>
 </tr>
 <tr class="altColor">
@@ -654,9 +654,15 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>BUCKET_CACHE_PERSISTENT_PATH_KEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.88">BUCKET_CACHE_PERSISTENT_PATH_KEY</a></pre>
-<div class="block">If the chosen ioengine can persist its state across restarts, the path to the file to
- persist to.</div>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.94">BUCKET_CACHE_PERSISTENT_PATH_KEY</a></pre>
+<div class="block">If the chosen ioengine can persist its state across restarts, the path to the file to persist
+ to. This file is NOT the data file. It is a file into which we will serialize the map of
+ what is in the data file. For example, if you pass the following argument as
+ BUCKET_CACHE_IOENGINE_KEY ("hbase.bucketcache.ioengine"),
+ <code>file:/tmp/bucketcache.data </code>, then we will write the bucketcache data to the file
+ <code>/tmp/bucketcache.data</code> but the metadata on where the data is in the supplied file
+ is an in-memory map that needs to be persisted across restarts. Where to store this
+ in-memory state is what you supply here: e.g. <code>/tmp/bucketcache.map</code>.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.BUCKET_CACHE_PERSISTENT_PATH_KEY">Constant Field Values</a></dd></dl>
 </li>
 </ul>
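
The expanded description above draws a distinction the old one glossed over: the ioengine's file (e.g. file:/tmp/bucketcache.data) holds the cached blocks themselves, while BUCKET_CACHE_PERSISTENT_PATH_KEY names a second, small file into which the in-memory map of block locations is serialized so it survives restarts. A hedged sketch of wiring the two together programmatically is below; the hbase.bucketcache.size sizing key is assumed from the standard configuration and is not part of this diff.

  // Sketch only: data file for the file ioengine, separate map file for persistence.
  Configuration conf = HBaseConfiguration.create();
  conf.set("hbase.bucketcache.ioengine", "file:/tmp/bucketcache.data");           // key named in the javadoc above
  conf.set(CacheConfig.BUCKET_CACHE_PERSISTENT_PATH_KEY, "/tmp/bucketcache.map"); // where the block map is persisted
  conf.setFloat("hbase.bucketcache.size", 4096f);  // assumed key; cache capacity (read as MB when > 1.0)
  // A CacheConfig/BlockCache built from this conf should pick these settings up when instantiated.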
@@ -666,7 +672,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>BUCKET_CACHE_COMBINED_KEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.96">BUCKET_CACHE_COMBINED_KEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.102">BUCKET_CACHE_COMBINED_KEY</a></pre>
 <div class="block">If the bucket cache is used in league with the lru on-heap block cache (meta blocks such
  as indices and blooms are kept in the lru blockcache and the data blocks in the
  bucket cache).</div>
@@ -679,7 +685,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>BUCKET_CACHE_WRITER_THREADS_KEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.99">BUCKET_CACHE_WRITER_THREADS_KEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.105">BUCKET_CACHE_WRITER_THREADS_KEY</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.BUCKET_CACHE_WRITER_THREADS_KEY">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -689,7 +695,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>BUCKET_CACHE_WRITER_QUEUE_KEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.100">BUCKET_CACHE_WRITER_QUEUE_KEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.106">BUCKET_CACHE_WRITER_QUEUE_KEY</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.BUCKET_CACHE_WRITER_QUEUE_KEY">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -699,7 +705,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>BUCKET_CACHE_BUCKETS_KEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.106">BUCKET_CACHE_BUCKETS_KEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.112">BUCKET_CACHE_BUCKETS_KEY</a></pre>
 <div class="block">A comma-delimited array of values for use as bucket sizes.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.BUCKET_CACHE_BUCKETS_KEY">Constant Field Values</a></dd></dl>
 </li>
@@ -710,7 +716,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_BUCKET_CACHE_COMBINED</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.111">DEFAULT_BUCKET_CACHE_COMBINED</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.117">DEFAULT_BUCKET_CACHE_COMBINED</a></pre>
 <div class="block">Defaults for Bucket cache</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_BUCKET_CACHE_COMBINED">Constant Field Values</a></dd></dl>
 </li>
@@ -721,7 +727,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_BUCKET_CACHE_WRITER_THREADS</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.112">DEFAULT_BUCKET_CACHE_WRITER_THREADS</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.118">DEFAULT_BUCKET_CACHE_WRITER_THREADS</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_BUCKET_CACHE_WRITER_THREADS">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -731,7 +737,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_BUCKET_CACHE_WRITER_QUEUE</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.113">DEFAULT_BUCKET_CACHE_WRITER_QUEUE</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.119">DEFAULT_BUCKET_CACHE_WRITER_QUEUE</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_BUCKET_CACHE_WRITER_QUEUE">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -741,7 +747,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>PREFETCH_BLOCKS_ON_OPEN_KEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.119">PREFETCH_BLOCKS_ON_OPEN_KEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.125">PREFETCH_BLOCKS_ON_OPEN_KEY</a></pre>
 <div class="block">Configuration key to prefetch all blocks of a given file into the block cache
  when the file is opened.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY">Constant Field Values</a></dd></dl>
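
PREFETCH_BLOCKS_ON_OPEN_KEY documented above is a simple boolean switch: setting it true asks region servers to pull a file's blocks into the block cache as the file is opened. A small sketch of flipping it on through the Configuration (the per-column-family prefetch flag, which can also enable this, is not shown here):

  // Sketch: enable prefetch-on-open globally via the constant documented above.
  Configuration conf = HBaseConfiguration.create();
  conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);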
@@ -753,7 +759,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>BLOCKCACHE_BLOCKSIZE_KEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.128">BLOCKCACHE_BLOCKSIZE_KEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.134">BLOCKCACHE_BLOCKSIZE_KEY</a></pre>
 <div class="block">The target block size used by blockcache instances. Defaults to
  <a href="../../../../../../org/apache/hadoop/hbase/HConstants.html#DEFAULT_BLOCKSIZE"><code>HConstants.DEFAULT_BLOCKSIZE</code></a>.
  TODO: this config point is completely wrong, as it's used to determine the
@@ -767,7 +773,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>EXTERNAL_BLOCKCACHE_KEY</h4>
-<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.130">EXTERNAL_BLOCKCACHE_KEY</a></pre>
+<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.136">EXTERNAL_BLOCKCACHE_KEY</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.EXTERNAL_BLOCKCACHE_KEY">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -777,7 +783,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>EXTERNAL_BLOCKCACHE_DEFAULT</h4>
-<pre>private static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.131">EXTERNAL_BLOCKCACHE_DEFAULT</a></pre>
+<pre>private static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.137">EXTERNAL_BLOCKCACHE_DEFAULT</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.EXTERNAL_BLOCKCACHE_DEFAULT">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -787,7 +793,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>EXTERNAL_BLOCKCACHE_CLASS_KEY</h4>
-<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.133">EXTERNAL_BLOCKCACHE_CLASS_KEY</a></pre>
+<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.139">EXTERNAL_BLOCKCACHE_CLASS_KEY</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.EXTERNAL_BLOCKCACHE_CLASS_KEY">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -797,7 +803,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DROP_BEHIND_CACHE_COMPACTION_KEY</h4>
-<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.134">DROP_BEHIND_CACHE_COMPACTION_KEY</a></pre>
+<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.140">DROP_BEHIND_CACHE_COMPACTION_KEY</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DROP_BEHIND_CACHE_COMPACTION_KEY">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -807,7 +813,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DROP_BEHIND_CACHE_COMPACTION_DEFAULT</h4>
-<pre>private static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.135">DROP_BEHIND_CACHE_COMPACTION_DEFAULT</a></pre>
+<pre>private static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.141">DROP_BEHIND_CACHE_COMPACTION_DEFAULT</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DROP_BEHIND_CACHE_COMPACTION_DEFAULT">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -817,7 +823,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_DATA_ON_READ</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.158">DEFAULT_CACHE_DATA_ON_READ</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.164">DEFAULT_CACHE_DATA_ON_READ</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_DATA_ON_READ">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -827,7 +833,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_DATA_ON_WRITE</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.159">DEFAULT_CACHE_DATA_ON_WRITE</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.165">DEFAULT_CACHE_DATA_ON_WRITE</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_DATA_ON_WRITE">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -837,7 +843,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_IN_MEMORY</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.160">DEFAULT_IN_MEMORY</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.166">DEFAULT_IN_MEMORY</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_IN_MEMORY">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -847,7 +853,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_INDEXES_ON_WRITE</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.161">DEFAULT_CACHE_INDEXES_ON_WRITE</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.167">DEFAULT_CACHE_INDEXES_ON_WRITE</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_INDEXES_ON_WRITE">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -857,7 +863,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_BLOOMS_ON_WRITE</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.162">DEFAULT_CACHE_BLOOMS_ON_WRITE</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.168">DEFAULT_CACHE_BLOOMS_ON_WRITE</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_BLOOMS_ON_WRITE">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -867,7 +873,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_EVICT_ON_CLOSE</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.163">DEFAULT_EVICT_ON_CLOSE</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.169">DEFAULT_EVICT_ON_CLOSE</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_EVICT_ON_CLOSE">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -877,7 +883,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_DATA_COMPRESSED</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.164">DEFAULT_CACHE_DATA_COMPRESSED</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.170">DEFAULT_CACHE_DATA_COMPRESSED</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_DATA_COMPRESSED">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -887,7 +893,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_PREFETCH_ON_OPEN</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.165">DEFAULT_PREFETCH_ON_OPEN</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.171">DEFAULT_PREFETCH_ON_OPEN</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_PREFETCH_ON_OPEN">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -897,7 +903,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>blockCache</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.168">blockCache</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.174">blockCache</a></pre>
 <div class="block">Local reference to the block cache, null if completely disabled</div>
 </li>
 </ul>
@@ -907,7 +913,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheDataOnRead</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.176">cacheDataOnRead</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.182">cacheDataOnRead</a></pre>
 <div class="block">Whether blocks should be cached on read (default is on if there is a
  cache but this can be turned off on a per-family or per-request basis).
  If off we will STILL cache meta blocks; i.e. INDEX and BLOOM types.
@@ -920,7 +926,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>inMemory</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.179">inMemory</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.185">inMemory</a></pre>
 <div class="block">Whether blocks should be flagged as in-memory when being cached</div>
 </li>
 </ul>
@@ -930,7 +936,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheDataOnWrite</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.182">cacheDataOnWrite</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.188">cacheDataOnWrite</a></pre>
 <div class="block">Whether data blocks should be cached when new files are written</div>
 </li>
 </ul>
@@ -940,7 +946,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheIndexesOnWrite</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.185">cacheIndexesOnWrite</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.191">cacheIndexesOnWrite</a></pre>
 <div class="block">Whether index blocks should be cached when new files are written</div>
 </li>
 </ul>
@@ -950,7 +956,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheBloomsOnWrite</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.188">cacheBloomsOnWrite</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.194">cacheBloomsOnWrite</a></pre>
 <div class="block">Whether compound bloom filter blocks should be cached on write</div>
 </li>
 </ul>
@@ -960,7 +966,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>evictOnClose</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.191">evictOnClose</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.197">evictOnClose</a></pre>
 <div class="block">Whether blocks of a file should be evicted when the file is closed</div>
 </li>
 </ul>
@@ -970,7 +976,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheDataCompressed</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.194">cacheDataCompressed</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.200">cacheDataCompressed</a></pre>
 <div class="block">Whether data blocks should be stored in compressed and/or encrypted form in the cache</div>
 </li>
 </ul>
@@ -980,7 +986,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>prefetchOnOpen</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.197">prefetchOnOpen</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.203">prefetchOnOpen</a></pre>
 <div class="block">Whether data blocks should be prefetched into the cache</div>
 </li>
 </ul>
@@ -990,7 +996,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheDataInL1</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.204">cacheDataInL1</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.210">cacheDataInL1</a></pre>
 <div class="block">If true and if more than one tier in this cache deploy -- e.g. CombinedBlockCache has an L1
  and an L2 tier -- then cache data blocks up in the L1 tier (The meta blocks are likely being
  cached up in L1 already.  At least this is the case if CombinedBlockCache).</div>
@@ -1002,7 +1008,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>dropBehindCompaction</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.206">dropBehindCompaction</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.212">dropBehindCompaction</a></pre>
 </li>
 </ul>
 <a name="GLOBAL_BLOCK_CACHE_INSTANCE">
@@ -1011,7 +1017,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>GLOBAL_BLOCK_CACHE_INSTANCE</h4>
-<pre>static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.517">GLOBAL_BLOCK_CACHE_INSTANCE</a></pre>
+<pre>static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.523">GLOBAL_BLOCK_CACHE_INSTANCE</a></pre>
 <div class="block">Static reference to the block cache, or null if no caching should be used
  at all.</div>
 </li>
@@ -1022,7 +1028,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>blockCacheDisabled</h4>
-<pre>static&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.521">blockCacheDisabled</a></pre>
+<pre>static&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.527">blockCacheDisabled</a></pre>
 <div class="block">Boolean whether we have disabled the block cache entirely.</div>
 </li>
 </ul>
@@ -1040,7 +1046,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.214">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.220">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
            <a href="../../../../../../org/apache/hadoop/hbase/HColumnDescriptor.html" title="class in org.apache.hadoop.hbase">HColumnDescriptor</a>&nbsp;family)</pre>
 <div class="block">Create a cache configuration using the specified configuration object and
  family descriptor.</div>
@@ -1053,7 +1059,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.242">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.248">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Create a cache configuration using the specified configuration object and
  defaults for family level settings.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>conf</code> - hbase configuration</dd></dl>
@@ -1065,7 +1071,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.275">CacheConfig</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;blockCache,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.281">CacheConfig</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;blockCache,
            boolean&nbsp;cacheDataOnRead,
            boolean&nbsp;inMemory,
            boolean&nbsp;cacheDataOnWrite,
@@ -1089,7 +1095,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.299">CacheConfig</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.305">CacheConfig</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
 <div class="block">Constructs a cache configuration copied from the specified configuration.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>cacheConf</code> - </dd></dl>
 </li>
@@ -1108,7 +1114,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isBlockCacheEnabled</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.310">isBlockCacheEnabled</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.316">isBlockCacheEnabled</a>()</pre>
 <div class="block">Checks whether the block cache is enabled.</div>
 </li>
 </ul>
@@ -1118,7 +1124,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockCache</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.318">getBlockCache</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.324">getBlockCache</a>()</pre>
 <div class="block">Returns the block cache.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the block cache, or null if caching is completely disabled</dd></dl>
 </li>
@@ -1129,7 +1135,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheDataOnRead</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.327">shouldCacheDataOnRead</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.333">shouldCacheDataOnRead</a>()</pre>
 <div class="block">Returns whether the DATA blocks of this HFile should be cached on read or not (we always
  cache the meta blocks, the INDEX and BLOOM blocks).</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true if blocks should be cached on read, false if not</dd></dl>
@@ -1141,7 +1147,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldDropBehindCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.331">shouldDropBehindCompaction</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.337">shouldDropBehindCompaction</a>()</pre>
 </li>
 </ul>
 <a name="shouldCacheBlockOnRead(org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory)">
@@ -1150,7 +1156,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheBlockOnRead</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.340">shouldCacheBlockOnRead</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.346">shouldCacheBlockOnRead</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
 <div class="block">Should we cache a block of a particular category? We always cache
  important blocks such as index blocks, as long as the block cache is
  available.</div>
@@ -1162,7 +1168,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isInMemory</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.353">isInMemory</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.359">isInMemory</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true if blocks in this file should be flagged as in-memory</dd></dl>
 </li>
 </ul>
@@ -1172,7 +1178,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isCacheDataInL1</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.360">isCacheDataInL1</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.366">isCacheDataInL1</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>True if cache data blocks in L1 tier (if more than one tier in block cache deploy).</dd></dl>
 </li>
 </ul>
@@ -1182,7 +1188,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheDataOnWrite</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.368">shouldCacheDataOnWrite</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.374">shouldCacheDataOnWrite</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true if data blocks should be written to the cache when an HFile is
          written, false if not</dd></dl>
 </li>
@@ -1193,7 +1199,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>setCacheDataOnWrite</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.378">setCacheDataOnWrite</a>(boolean&nbsp;cacheDataOnWrite)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.384">setCacheDataOnWrite</a>(boolean&nbsp;cacheDataOnWrite)</pre>
 <div class="block">Only used for testing.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>cacheDataOnWrite</code> - whether data blocks should be written to the cache
                          when an HFile is written</dd></dl>
@@ -1205,7 +1211,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>setCacheDataInL1</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.388">setCacheDataInL1</a>(boolean&nbsp;cacheDataInL1)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.394">setCacheDataInL1</a>(boolean&nbsp;cacheDataInL1)</pre>
 <div class="block">Only used for testing.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>cacheDataInL1</code> - Whether to cache data blocks up in l1 (if a multi-tier cache
  implementation).</dd></dl>
@@ -1217,7 +1223,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheIndexesOnWrite</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.396">shouldCacheIndexesOnWrite</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.402">shouldCacheIndexesOnWrite</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true if index blocks should be written to the cache when an HFile
          is written, false if not</dd></dl>
 </li>
@@ -1228,7 +1234,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheBloomsOnWrite</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.404">shouldCacheBloomsOnWrite</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.410">shouldCacheBloomsOnWrite</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true if bloom blocks should be written to the cache when an HFile
          is written, false if not</dd></dl>
 </li>
@@ -1239,7 +1245,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldEvictOnClose</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.412">shouldEvictOnClose</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.418">shouldEvictOnClose</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true if blocks should be evicted from the cache when an HFile
          reader is closed, false if not</dd></dl>
 </li>
@@ -1250,7 +1256,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>setEvictOnClose</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.421">setEvictOnClose</a>(boolean&nbsp;evictOnClose)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.427">setEvictOnClose</a>(boolean&nbsp;evictOnClose)</pre>
 <div class="block">Only used for testing.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>evictOnClose</code> - whether blocks should be evicted from the cache when an
                      HFile reader is closed</dd></dl>
@@ -1262,7 +1268,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheDataCompressed</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.428">shouldCacheDataCompressed</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.434">shouldCacheDataCompressed</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true if data blocks should be compressed in the cache, false if not</dd></dl>
 </li>
 </ul>
@@ -1272,7 +1278,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheCompressed</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.435">shouldCacheCompressed</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.441">shouldCacheCompressed</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true if this <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><code>BlockType.BlockCategory</code></a> should be compressed in blockcache, false otherwise</dd></dl>
 </li>
 </ul>
@@ -1282,7 +1288,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldPrefetchOnOpen</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.448">shouldPrefetchOnOpen</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.454">shouldPrefetchOnOpen</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true if blocks should be prefetched into the cache on open, false if not</dd></dl>
 </li>
 </ul>
@@ -1292,7 +1298,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldReadBlockFromCache</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.460">shouldReadBlockFromCache</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.466">shouldReadBlockFromCache</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
 <div class="block">Return true if we may find this type of block in block cache.
  <p>
  TODO: today <code>family.isBlockCacheEnabled()</code> only means <code>cacheDataOnRead</code>, so here we
@@ -1307,7 +1313,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldLockOnCacheMiss</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.487">shouldLockOnCacheMiss</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.493">shouldLockOnCacheMiss</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
 <div class="block">If we make sure the block could not be cached, we will not acquire the lock
  otherwise we will acquire lock</div>
 </li>
@@ -1318,7 +1324,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.495">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.501">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -1331,7 +1337,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getLruCacheSize</h4>
-<pre>static&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.523">getLruCacheSize</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>static&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.529">getLruCacheSize</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                    <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/management/MemoryUsage.html?is-external=true" title="class or interface in java.lang.management">MemoryUsage</a>&nbsp;mu)</pre>
 </li>
 </ul>
@@ -1341,7 +1347,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getL1</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.544">getL1</a>(org.apache.hadoop.conf.Configuration&nbsp;c,
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.550">getL1</a>(org.apache.hadoop.conf.Configuration&nbsp;c,
                   <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/management/MemoryUsage.html?is-external=true" title="class or interface in java.lang.management">MemoryUsage</a>&nbsp;mu)</pre>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>c</code> - Configuration to use.</dd><dd><code>mu</code> - JMX Memory Bean</dd>
 <dt><span class="strong">Returns:</span></dt><dd>An L1 instance.  Currently an instance of LruBlockCache.</dd></dl>
@@ -1353,7 +1359,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getL2</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.559">getL2</a>(org.apache.hadoop.conf.Configuration&nbsp;c,
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.565">getL2</a>(org.apache.hadoop.conf.Configuration&nbsp;c,
                <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/management/MemoryUsage.html?is-external=true" title="class or interface in java.lang.management">MemoryUsage</a>&nbsp;mu)</pre>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>c</code> - Configuration to use.</dd><dd><code>mu</code> - JMX Memory Bean</dd>
 <dt><span class="strong">Returns:</span></dt><dd>Returns L2 block cache instance (for now it is BucketCache BlockCache all the time)
@@ -1366,7 +1372,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getExternalBlockcache</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.575">getExternalBlockcache</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.581">getExternalBlockcache</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
 </li>
 </ul>
 <a name="getBucketCache(org.apache.hadoop.conf.Configuration, java.lang.management.MemoryUsage)">
@@ -1375,7 +1381,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getBucketCache</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.601">getBucketCache</a>(org.apache.hadoop.conf.Configuration&nbsp;c,
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.607">getBucketCache</a>(org.apache.hadoop.conf.Configuration&nbsp;c,
                         <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/management/MemoryUsage.html?is-external=true" title="class or interface in java.lang.management">MemoryUsage</a>&nbsp;mu)</pre>
 </li>
 </ul>
@@ -1385,7 +1391,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>instantiateBlockCache</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.653">instantiateBlockCache</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.659">instantiateBlockCache</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Returns the block cache or <code>null</code> in case none should be used.
  Sets GLOBAL_BLOCK_CACHE_INSTANCE</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>conf</code> - The current configuration.</dd>
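
As a quick illustration of the CacheConfig API whose javadoc anchors are renumbered in the hunks above, here is a minimal sketch of how it is typically constructed and queried. This is not part of this commit; the column family name "cf" and the use of HBaseConfiguration.create() are assumptions for illustration only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;

public class CacheConfigExample {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Family-level cache settings (BLOCKCACHE, IN_MEMORY, ...) are folded in by this constructor.
    HColumnDescriptor family = new HColumnDescriptor("cf");
    CacheConfig cacheConf = new CacheConfig(conf, family);

    if (cacheConf.isBlockCacheEnabled()) {
      BlockCache cache = cacheConf.getBlockCache(); // null only if caching is completely disabled
      System.out.println("cache data on read:  " + cacheConf.shouldCacheDataOnRead());
      System.out.println("cache data on write: " + cacheConf.shouldCacheDataOnWrite());
      System.out.println("prefetch on open:    " + cacheConf.shouldPrefetchOnOpen());
      System.out.println("evict on close:      " + cacheConf.shouldEvictOnClose());
      System.out.println("block cache:         " + cache);
    }
  }
}

Per the instantiateBlockCache javadoc above, the underlying BlockCache is built from the Configuration and kept in the static GLOBAL_BLOCK_CACHE_INSTANCE, so CacheConfig instances created in the same process share one cache.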

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
index 176d4c9..3f70cb0 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
@@ -99,7 +99,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.364">HFile.CachingBlockReader</a></pre>
+<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.365">HFile.CachingBlockReader</a></pre>
 <div class="block">An abstraction used by the block index.
  Implementations will check cache for any asked-for block and return cached block if found.
  Otherwise, after reading from fs, will try and put block into cache before returning.</div>
@@ -161,7 +161,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>readBlock</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html#line.382">readBlock</a>(long&nbsp;offset,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html#line.383">readBlock</a>(long&nbsp;offset,
                    long&nbsp;onDiskBlockSize,
                    boolean&nbsp;cacheBlock,
                    boolean&nbsp;pread,
@@ -188,7 +188,7 @@
 <ul class="blockListLast">
 <li class="blockList">
 <h4>returnBlock</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html#line.392">returnBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;block)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html#line.393">returnBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;block)</pre>
 <div class="block">Return the given block back to the cache, if it was obtained from cache.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>block</code> - Block to be returned.</dd></dl>
 </li>
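
The CachingBlockReader hunks above only shift source-line anchors, but the read/return contract they document is easy to get wrong, so here is a hypothetical sketch of the intended usage pattern. It is not part of this commit; fetchBlock is a made-up placeholder because the full readBlock(...) parameter list is not reproduced in this hunk.

import java.io.IOException;

import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileBlock;

public class CachingBlockReaderContract {

  // Placeholder for a concrete reader.readBlock(offset, onDiskBlockSize, cacheBlock, pread, ...)
  // call; only the first few readBlock parameters are visible in the hunk above.
  static HFileBlock fetchBlock(HFile.CachingBlockReader reader, long offset, long onDiskSize)
      throws IOException {
    throw new UnsupportedOperationException("illustrative placeholder only");
  }

  static void useOneBlock(HFile.CachingBlockReader reader, long offset, long onDiskSize)
      throws IOException {
    HFileBlock block = fetchBlock(reader, offset, onDiskSize);
    try {
      // ... read keys/values out of the block ...
    } finally {
      // Per the returnBlock javadoc above: hand the block back so it can be released
      // if it was obtained from the block cache.
      reader.returnBlock(block);
    }
  }
}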


[44/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/checkstyle.rss
----------------------------------------------------------------------
diff --git a/checkstyle.rss b/checkstyle.rss
index fa53009..b525c75 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
     <copyright>&#169;2007 - 2016 The Apache Software Foundation</copyright>
     <item>
       <title>File: 1693,
-             Errors: 12706,
+             Errors: 12697,
              Warnings: 0,
              Infos: 0
       </title>
@@ -1777,7 +1777,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  24
+                  23
                 </td>
               </tr>
                           <tr>
@@ -3611,7 +3611,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  7
+                  4
                 </td>
               </tr>
                           <tr>
@@ -6061,7 +6061,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  1
+                  0
                 </td>
               </tr>
                           <tr>
@@ -12431,7 +12431,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  52
+                  51
                 </td>
               </tr>
                           <tr>
@@ -13915,7 +13915,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  28
+                  27
                 </td>
               </tr>
                           <tr>
@@ -20243,7 +20243,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  15
+                  14
                 </td>
               </tr>
                           <tr>
@@ -21321,7 +21321,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  7
+                  6
                 </td>
               </tr>
                           <tr>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/coc.html
----------------------------------------------------------------------
diff --git a/coc.html b/coc.html
index 31b3799..79266d3 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20160303" />
+    <meta name="Date-Revision-yyyymmdd" content="20160304" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Code of Conduct Policy
@@ -331,7 +331,7 @@ For flagrant violations requiring a firm response the PMC may opt to skip early
                         <a href="http://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2016-03-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2016-03-04</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/cygwin.html
----------------------------------------------------------------------
diff --git a/cygwin.html b/cygwin.html
index 9a27b5f..648ab86 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20160303" />
+    <meta name="Date-Revision-yyyymmdd" content="20160304" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Installing Apache HBase (TM) on Windows using Cygwin</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -673,7 +673,7 @@ Now your <b>HBase </b>server is running, <b>start coding</b> and build that next
                         <a href="http://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2016-03-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2016-03-04</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/dependencies.html
----------------------------------------------------------------------
diff --git a/dependencies.html b/dependencies.html
index 8403051..deeddd9 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20160303" />
+    <meta name="Date-Revision-yyyymmdd" content="20160304" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependencies</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -518,7 +518,7 @@
                         <a href="http://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2016-03-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2016-03-04</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/dependency-convergence.html
----------------------------------------------------------------------
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 485e456..5ce6e66 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20160303" />
+    <meta name="Date-Revision-yyyymmdd" content="20160304" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Reactor Dependency Convergence</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -1648,7 +1648,7 @@
                         <a href="http://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2016-03-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2016-03-04</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/dependency-info.html
----------------------------------------------------------------------
diff --git a/dependency-info.html b/dependency-info.html
index 77a24fd..1e768a9 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20160303" />
+    <meta name="Date-Revision-yyyymmdd" content="20160304" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Dependency Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -312,7 +312,7 @@
                         <a href="http://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2016-03-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2016-03-04</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/dependency-management.html
----------------------------------------------------------------------
diff --git a/dependency-management.html b/dependency-management.html
index bec81a8..e129bcb 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20160303" />
+    <meta name="Date-Revision-yyyymmdd" content="20160304" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependency Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -786,7 +786,7 @@
                         <a href="http://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2016-03-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2016-03-04</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/constant-values.html
----------------------------------------------------------------------
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 6fe515e..d886556 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -6851,20 +6851,13 @@
 <td class="colLast"><code>false</code></td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.HFileBlock.ENCODED_HEADER_SIZE">
-<!--   -->
-</a><code>public&nbsp;static&nbsp;final&nbsp;int</code></td>
-<td><code><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.html#ENCODED_HEADER_SIZE">ENCODED_HEADER_SIZE</a></code></td>
-<td class="colLast"><code>35</code></td>
-</tr>
-<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.HFileBlock.EXTRA_SERIALIZATION_SPACE">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;int</code></td>
 <td><code><a href="org/apache/hadoop/hbase/io/hfile/HFileBlock.html#EXTRA_SERIALIZATION_SPACE">EXTRA_SERIALIZATION_SPACE</a></code></td>
 <td class="colLast"><code>13</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.HFileBlock.FILL_HEADER">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;boolean</code></td>
@@ -7095,6 +7088,13 @@
 <td><code><a href="org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#KEY_VALUE_VER_WITH_MEMSTORE">KEY_VALUE_VER_WITH_MEMSTORE</a></code></td>
 <td class="colLast"><code>1</code></td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.HFileWriterImpl.UNSET">
+<!--   -->
+</a><code>private&nbsp;static&nbsp;final&nbsp;long</code></td>
+<td><code><a href="org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#UNSET">UNSET</a></code></td>
+<td class="colLast"><code>-1L</code></td>
+</tr>
 </tbody>
 </table>
 </li>
@@ -16108,7 +16108,7 @@
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html#DEPRECATED_NAME_OF_NO_LIMIT_THROUGHPUT_CONTROLLER_CLASS">DEPRECATED_NAME_OF_NO_LIMIT_THROUGHPUT_CONTROLLER_CLASS</a></code></td>
-<td class="colLast"><code>"org.apache.hadoop.hbase.regionserver.compactions.NoLimitThroughputController.java"</code></td>
+<td class="colLast"><code>"org.apache.hadoop.hbase.regionserver.compactions.NoLimitThroughputController"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory.DEPRECATED_NAME_OF_PRESSURE_AWARE_THROUGHPUT_CONTROLLER_CLASS">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/index-all.html
----------------------------------------------------------------------
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index aef5e16..149c5f3 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -4628,7 +4628,9 @@
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html#blockDataSizes">blockDataSizes</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexReader</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#blockDeserializer">blockDeserializer</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">Used deserializing blocks from Cache.</div>
+</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html#BLOCKED_REQUESTS_COUNT">BLOCKED_REQUESTS_COUNT</a></span> - Static variable in interface org.apache.hadoop.hbase.regionserver.<a href="./org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html" title="interface in org.apache.hadoop.hbase.regionserver">MetricsRegionServerSource</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html#BLOCKED_REQUESTS_COUNT_DESC">BLOCKED_REQUESTS_COUNT_DESC</a></span> - Static variable in interface org.apache.hadoop.hbase.regionserver.<a href="./org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html" title="interface in org.apache.hadoop.hbase.regionserver">MetricsRegionServerSource</a></dt>
@@ -4828,6 +4830,10 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html#blockWriter">blockWriter</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexWriter</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#blockWriter">blockWriter</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileWriterImpl</a></dt>
+<dd>
+<div class="block">block writer</div>
+</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.html#blockWritten(long,%20int,%20int)">blockWritten(long, int, int)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.html" title="class in org.apache.hadoop.hbase.io.hfile">CompoundBloomFilterWriter</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html#blockWritten(long,%20int,%20int)">blockWritten(long, int, int)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexWriter</a></dt>
@@ -4990,8 +4996,8 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/CacheConfig.html#BUCKET_CACHE_PERSISTENT_PATH_KEY">BUCKET_CACHE_PERSISTENT_PATH_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
 <dd>
-<div class="block">If the chosen ioengine can persist its state across restarts, the path to the file to
- persist to.</div>
+<div class="block">If the chosen ioengine can persist its state across restarts, the path to the file to persist
+ to.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HConstants.html#BUCKET_CACHE_SIZE_KEY">BUCKET_CACHE_SIZE_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HConstants.html" title="class in org.apache.hadoop.hbase">HConstants</a></dt>
 <dd>
@@ -5039,7 +5045,7 @@
 <dt><a href="./org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><span class="strong">BucketCache</span></a> - Class in <a href="./org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html">org.apache.hadoop.hbase.io.hfile.bucket</a></dt>
 <dd>
 <div class="block">BucketCache uses <a href="./org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketAllocator</code></a> to allocate/free blocks, and uses
- <a href="./org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ramCache"><code>BucketCache.ramCache</code></a> and <a href="./org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#backingMap"><code>BucketCache.backingMap</code></a> in order to
+ BucketCache#ramCache and BucketCache#backingMap in order to
  determine if a given element is in the cache.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#BucketCache(java.lang.String,%20long,%20int,%20int[],%20int,%20int,%20java.lang.String)">BucketCache(String, long, int, int[], int, int, String)</a></span> - Constructor for class org.apache.hadoop.hbase.io.hfile.bucket.<a href="./org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></dt>
@@ -8155,6 +8161,8 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html#checkState(int)">checkState(int)</a></span> - Method in interface org.apache.hadoop.hbase.client.<a href="./org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.WaitForStateCallable.html" title="interface in org.apache.hadoop.hbase.client">HBaseAdmin.ProcedureFuture.WaitForStateCallable</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFile.html#CHECKSUM_FAILURES">CHECKSUM_FAILURES</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#CHECKSUM_SIZE">CHECKSUM_SIZE</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
 <dd>
 <div class="block">Each checksum value is an integer that can be stored in 4 bytes.</div>
@@ -8167,9 +8175,8 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD">CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
 <dd>
-<div class="block">On a checksum failure on a Reader, these many suceeding read
- requests switch back to using hdfs checksums before auto-reenabling
- hbase checksum verification.</div>
+<div class="block">On a checksum failure, do these many succeeding read requests using hdfs checksums before
+ auto-reenabling hbase checksum verification.</div>
 </dd>
 <dt><a href="./org/apache/hadoop/hbase/util/ChecksumFactory.html" title="class in org.apache.hadoop.hbase.util"><span class="strong">ChecksumFactory</span></a> - Class in <a href="./org/apache/hadoop/hbase/util/package-summary.html">org.apache.hadoop.hbase.util</a></dt>
 <dd>
@@ -8177,8 +8184,6 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/util/ChecksumFactory.html#ChecksumFactory()">ChecksumFactory()</a></span> - Constructor for class org.apache.hadoop.hbase.util.<a href="./org/apache/hadoop/hbase/util/ChecksumFactory.html" title="class in org.apache.hadoop.hbase.util">ChecksumFactory</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFile.html#checksumFailures">checksumFailures</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html#checksumOk()">checksumOk()</a></span> - Method in class org.apache.hadoop.hbase.io.<a href="./org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a></dt>
 <dd>
 <div class="block">Report that checksum was ok, so we may ponder going back to HBase checksum.</div>
@@ -16257,6 +16262,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#DATABLOCK_ENCODING_OVERRIDE_CONF_KEY">DATABLOCK_ENCODING_OVERRIDE_CONF_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.mapreduce.<a href="./org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html" title="class in org.apache.hadoop.hbase.mapreduce">HFileOutputFormat2</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFile.html#DATABLOCK_READ_COUNT">DATABLOCK_READ_COUNT</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.CachedBlocksByFile.html#dataBlockCount">dataBlockCount</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.CachedBlocksByFile.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheUtil.CachedBlocksByFile</a></dt>
 <dd>&nbsp;</dd>
 <dt><a href="./org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.encoding"><span class="strong">DataBlockEncoder</span></a> - Interface in <a href="./org/apache/hadoop/hbase/io/encoding/package-summary.html">org.apache.hadoop.hbase.io.encoding</a></dt>
@@ -16296,8 +16303,6 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#dataBlockIndexWriter">dataBlockIndexWriter</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileWriterImpl</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFile.html#dataBlockReadCnt">dataBlockReadCnt</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/procedure/Procedure.html#dataFromFinishedMembers">dataFromFinishedMembers</a></span> - Variable in class org.apache.hadoop.hbase.procedure.<a href="./org/apache/hadoop/hbase/procedure/Procedure.html" title="class in org.apache.hadoop.hbase.procedure">Procedure</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html#dataIndexCount">dataIndexCount</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html" title="class in org.apache.hadoop.hbase.io.hfile">FixedFileTrailer</a></dt>
@@ -19079,8 +19084,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#disableCache()">disableCache()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="./org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></dt>
 <dd>
-<div class="block">Used to shut down the cache -or- turn it off in the case of something
- broken.</div>
+<div class="block">Used to shut down the cache -or- turn it off in the case of something broken.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/constraint/Constraints.html#disableConstraint(org.apache.hadoop.hbase.HTableDescriptor,%20java.lang.Class)">disableConstraint(HTableDescriptor, Class&lt;? extends Constraint&gt;)</a></span> - Static method in class org.apache.hadoop.hbase.constraint.<a href="./org/apache/hadoop/hbase/constraint/Constraints.html" title="class in org.apache.hadoop.hbase.constraint">Constraints</a></dt>
 <dd>
@@ -20426,10 +20430,6 @@
 <dd>
 <div class="block">Encodes a byte array into Base64 notation.</div>
 </dd>
-<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#ENCODED_HEADER_SIZE">ENCODED_HEADER_SIZE</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
-<dd>
-<div class="block">The size of block header when blockType is <a href="./org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a>.</div>
-</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HRegionInfo.html#ENCODED_REGION_NAME_REGEX">ENCODED_REGION_NAME_REGEX</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a></dt>
 <dd>
 <div class="block">A non-capture group so that this can be embedded.</div>
@@ -22138,8 +22138,6 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#expectState(org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State)">expectState(HFileBlock.Writer.State)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#expectType(org.apache.hadoop.hbase.io.hfile.BlockType)">expectType(BlockType)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.html#expEvaluator">expEvaluator</a></span> - Variable in class org.apache.hadoop.hbase.security.visibility.<a href="./org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.html" title="class in org.apache.hadoop.hbase.security.visibility">VisibilityLabelFilter</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/TaskMonitor.html#EXPIRATION_TIME">EXPIRATION_TIME</a></span> - Static variable in class org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/TaskMonitor.html" title="class in org.apache.hadoop.hbase.monitoring">TaskMonitor</a></dt>
@@ -22279,7 +22277,9 @@
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/CacheConfig.html#EXTERNAL_BLOCKCACHE_KEY">EXTERNAL_BLOCKCACHE_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#EXTRA_SERIALIZATION_SPACE">EXTRA_SERIALIZATION_SPACE</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">See #blockDeserializer method for more info.</div>
+</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/security/visibility/VisibilityUtils.html#extractAndPartitionTags(org.apache.hadoop.hbase.Cell,%20java.util.List,%20java.util.List)">extractAndPartitionTags(Cell, List&lt;Tag&gt;, List&lt;Tag&gt;)</a></span> - Static method in class org.apache.hadoop.hbase.security.visibility.<a href="./org/apache/hadoop/hbase/security/visibility/VisibilityUtils.html" title="class in org.apache.hadoop.hbase.security.visibility">VisibilityUtils</a></dt>
 <dd>
 <div class="block">Extracts and partitions the visibility tags and nonVisibility Tags</div>
@@ -22886,7 +22886,9 @@
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/quotas/QuotaCache.QuotaRefresherChore.html#fetchUserQuotaState()">fetchUserQuotaState()</a></span> - Method in class org.apache.hadoop.hbase.quotas.<a href="./org/apache/hadoop/hbase/quotas/QuotaCache.QuotaRefresherChore.html" title="class in org.apache.hadoop.hbase.quotas">QuotaCache.QuotaRefresherChore</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#FEWEST_ITEMS_IN_BUCKET">FEWEST_ITEMS_IN_BUCKET</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.bucket.<a href="./org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a></dt>
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">So, what is the minimum amount of items we'll tolerate in a single bucket?</div>
+</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/rest/MetricsRESTSourceImpl.html#fGet">fGet</a></span> - Variable in class org.apache.hadoop.hbase.rest.<a href="./org/apache/hadoop/hbase/rest/MetricsRESTSourceImpl.html" title="class in org.apache.hadoop.hbase.rest">MetricsRESTSourceImpl</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/types/Struct.html#fields">fields</a></span> - Variable in class org.apache.hadoop.hbase.types.<a href="./org/apache/hadoop/hbase/types/Struct.html" title="class in org.apache.hadoop.hbase.types">Struct</a></dt>
@@ -23756,7 +23758,7 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#finishBlock()">finishBlock()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileWriterImpl</a></dt>
 <dd>
-<div class="block">Clean up the current data block</div>
+<div class="block">Clean up the data block that is currently being written.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#finishBlockAndWriteHeaderAndData(java.io.DataOutputStream)">finishBlockAndWriteHeaderAndData(DataOutputStream)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer</a></dt>
 <dd>
@@ -24849,10 +24851,6 @@
 <dd>
 <div class="block">Filesystem-level block reader.</div>
 </dd>
-<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#fsBlockWriter">fsBlockWriter</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileWriterImpl</a></dt>
-<dd>
-<div class="block">block writer</div>
-</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/rest/MetricsRESTSourceImpl.html#fScan">fScan</a></span> - Variable in class org.apache.hadoop.hbase.rest.<a href="./org/apache/hadoop/hbase/rest/MetricsRESTSourceImpl.html" title="class in org.apache.hadoop.hbase.rest">MetricsRESTSourceImpl</a></dt>
 <dd>&nbsp;</dd>
 <dt><a href="./org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io"><span class="strong">FSDataInputStreamWrapper</span></a> - Class in <a href="./org/apache/hadoop/hbase/io/package-summary.html">org.apache.hadoop.hbase.io</a></dt>
@@ -42201,7 +42199,7 @@
 <dd>&nbsp;</dd>
 <dt><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFileBlock</span></a> - Class in <a href="./org/apache/hadoop/hbase/io/hfile/package-summary.html">org.apache.hadoop.hbase.io.hfile</a></dt>
 <dd>
-<div class="block">Reading <a href="./org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and 2 blocks, and writing version 2 blocks.</div>
+<div class="block">Reads <a href="./org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and version 2 blocks but writes version 2 blocks only.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#HFileBlock(org.apache.hadoop.hbase.io.hfile.BlockType,%20int,%20int,%20long,%20org.apache.hadoop.hbase.nio.ByteBuff,%20boolean,%20long,%20int,%20org.apache.hadoop.hbase.io.hfile.HFileContext)">HFileBlock(BlockType, int, int, long, ByteBuff, boolean, long, int, HFileContext)</a></span> - Constructor for class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
 <dd>
@@ -44814,7 +44812,9 @@
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/client/Result.html#INITIAL_CELLSCANNER_INDEX">INITIAL_CELLSCANNER_INDEX</a></span> - Static variable in class org.apache.hadoop.hbase.client.<a href="./org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#INITIAL_KEY_BUFFER_SIZE">INITIAL_KEY_BUFFER_SIZE</a></span> - Static variable in class org.apache.hadoop.hbase.io.encoding.<a href="./org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html" title="class in org.apache.hadoop.hbase.io.encoding">BufferedDataBlockEncoder</a></dt>
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">TODO: This datablockencoder is dealing in internals of hfileblocks.</div>
+</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/TimeRangeTracker.html#INITIAL_MINIMUM_TIMESTAMP">INITIAL_MINIMUM_TIMESTAMP</a></span> - Static variable in class org.apache.hadoop.hbase.regionserver.<a href="./org/apache/hadoop/hbase/regionserver/TimeRangeTracker.html" title="class in org.apache.hadoop.hbase.regionserver">TimeRangeTracker</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/codec/prefixtree/encode/PrefixTreeEncoder.html#INITIAL_PER_CELL_ARRAY_SIZES">INITIAL_PER_CELL_ARRAY_SIZES</a></span> - Static variable in class org.apache.hadoop.hbase.codec.prefixtree.encode.<a href="./org/apache/hadoop/hbase/codec/prefixtree/encode/PrefixTreeEncoder.html" title="class in org.apache.hadoop.hbase.codec.prefixtree.encode">PrefixTreeEncoder</a></dt>
@@ -51147,6 +51147,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFile.html#LOG">LOG</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#LOG">LOG</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.html#LOG">LOG</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html#LOG">LOG</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFilePrettyPrinter</a></dt>
@@ -61834,7 +61836,8 @@ service.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#positionalReadWithExtra(org.apache.hadoop.fs.FSDataInputStream,%20long,%20byte[],%20int,%20int,%20int)">positionalReadWithExtra(FSDataInputStream, long, byte[], int, int, int)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
 <dd>
-<div class="block">Read from an input stream.</div>
+<div class="block">Read from an input stream at least <code>necessaryLen</code> and if possible,
+ <code>extraLen</code> also if available.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/codec/prefixtree/decode/column/ColumnNodeReader.html#positionAt(int)">positionAt(int)</a></span> - Method in class org.apache.hadoop.hbase.codec.prefixtree.decode.column.<a href="./org/apache/hadoop/hbase/codec/prefixtree/decode/column/ColumnNodeReader.html" title="class in org.apache.hadoop.hbase.codec.prefixtree.decode.column">ColumnNodeReader</a></dt>
 <dd>
@@ -67994,7 +67997,8 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#readWithExtra(java.io.InputStream,%20byte[],%20int,%20int,%20int)">readWithExtra(InputStream, byte[], int, int, int)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
 <dd>
-<div class="block">Read from an input stream.</div>
+<div class="block">Read from an input stream at least <code>necessaryLen</code> and if possible,
+ <code>extraLen</code> also if available.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html#readWriteLock">readWriteLock</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="./org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion.RowLockContext</a></dt>
 <dd>&nbsp;</dd>
@@ -76295,7 +76299,9 @@ service.</div>
  the <a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase"><code>HTableDescriptor</code></a></div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#serializeExtraInfo(java.nio.ByteBuffer)">serializeExtraInfo(ByteBuffer)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">Write out the content of EXTRA_SERIALIZATION_SPACE.</div>
+</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/wal/WALKey.html#serializeReplicationScope(boolean)">serializeReplicationScope(boolean)</a></span> - Method in class org.apache.hadoop.hbase.wal.<a href="./org/apache/hadoop/hbase/wal/WALKey.html" title="class in org.apache.hadoop.hbase.wal">WALKey</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.html#serializeStateData(java.io.OutputStream)">serializeStateData(OutputStream)</a></span> - Method in class org.apache.hadoop.hbase.master.procedure.<a href="./org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.html" title="class in org.apache.hadoop.hbase.master.procedure">AddColumnFamilyProcedure</a></dt>
@@ -88253,6 +88259,8 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#toString()">toString()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReaderImpl</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#toString()">toString()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.PrefetchedHeader</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#toString()">toString()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.ByteArrayKeyBlockIndexReader.html#toString()">toString()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.ByteArrayKeyBlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.ByteArrayKeyBlockIndexReader</a></dt>
@@ -88870,7 +88878,7 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#totalChecksumBytes()">totalChecksumBytes()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
 <dd>
-<div class="block">Calcuate the number of bytes required to store all the checksums
+<div class="block">Calculate the number of bytes required to store all the checksums
  for this block.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.html#totalCompactedSize">totalCompactedSize</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.compactions.<a href="./org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionProgress</a></dt>
@@ -89880,6 +89888,10 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/client/BufferedMutatorParams.html#UNSET">UNSET</a></span> - Static variable in class org.apache.hadoop.hbase.client.<a href="./org/apache/hadoop/hbase/client/BufferedMutatorParams.html" title="class in org.apache.hadoop.hbase.client">BufferedMutatorParams</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html#UNSET">UNSET</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#UNSET">UNSET</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileWriterImpl</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/util/AbstractByteRange.html#unset()">unset()</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="./org/apache/hadoop/hbase/util/AbstractByteRange.html" title="class in org.apache.hadoop.hbase.util">AbstractByteRange</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/util/AbstractPositionedByteRange.html#unset()">unset()</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="./org/apache/hadoop/hbase/util/AbstractPositionedByteRange.html" title="class in org.apache.hadoop.hbase.util">AbstractPositionedByteRange</a></dt>

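The reworded index entries above for HFileBlock#readWithExtra and HFileBlock#positionalReadWithExtra both describe the same contract: read at least necessaryLen bytes, and opportunistically pick up a further extraLen bytes when the stream can supply them. A minimal sketch of that contract follows; it is illustrative only, not the HBase source, and the class name ReadWithExtraSketch is made up for this example.

import java.io.IOException;
import java.io.InputStream;

// Illustrative sketch only -- not the HBase implementation.
public final class ReadWithExtraSketch {

  /**
   * Reads at least {@code necessaryLen} bytes into {@code buf} starting at
   * {@code offset}, and up to {@code necessaryLen + extraLen} bytes if the
   * stream has them before EOF.
   *
   * @return true if the extra bytes were also read, false if only the
   *         necessary bytes could be read before EOF
   * @throws IOException if EOF is reached before the necessary bytes are read
   */
  static boolean readWithExtra(InputStream in, byte[] buf, int offset,
      int necessaryLen, int extraLen) throws IOException {
    int bytesRemaining = necessaryLen + extraLen;
    while (bytesRemaining > 0) {
      int ret = in.read(buf, offset, bytesRemaining);
      if (ret < 0) {
        if (bytesRemaining <= extraLen) {
          // The necessary bytes are already in the buffer; the extra bytes
          // were optional, so signal that they were not available.
          return false;
        }
        throw new IOException("Premature EOF: still needed "
            + (bytesRemaining - extraLen) + " necessary bytes");
      }
      offset += ret;
      bytesRemaining -= ret;
    }
    // Both the necessary and the extra bytes were read.
    return true;
  }
}

Under this reading, a caller asking for a block of necessaryLen bytes can also request the next block's header as extraLen, saving a seek when the bytes happen to be available; that is the behavior the updated Javadoc summaries above are hinting at.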
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/HConstants.Modify.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.Modify.html b/devapidocs/org/apache/hadoop/hbase/HConstants.Modify.html
index d30473d..bd7355b 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.Modify.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.Modify.html
@@ -109,7 +109,7 @@
 <hr>
 <br>
 <pre><a href="../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Private</a>
-public static enum <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.610">HConstants.Modify</a>
+public static enum <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.612">HConstants.Modify</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a>&gt;</pre>
 <div class="block">modifyTable op for replacing the table descriptor</div>
 </li>
@@ -210,7 +210,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>CLOSE_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.611">CLOSE_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.613">CLOSE_REGION</a></pre>
 </li>
 </ul>
 <a name="TABLE_COMPACT">
@@ -219,7 +219,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE_COMPACT</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.612">TABLE_COMPACT</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.614">TABLE_COMPACT</a></pre>
 </li>
 </ul>
 <a name="TABLE_FLUSH">
@@ -228,7 +228,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE_FLUSH</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.613">TABLE_FLUSH</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.615">TABLE_FLUSH</a></pre>
 </li>
 </ul>
 <a name="TABLE_MAJOR_COMPACT">
@@ -237,7 +237,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE_MAJOR_COMPACT</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.614">TABLE_MAJOR_COMPACT</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.616">TABLE_MAJOR_COMPACT</a></pre>
 </li>
 </ul>
 <a name="TABLE_SET_HTD">
@@ -246,7 +246,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE_SET_HTD</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.615">TABLE_SET_HTD</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.617">TABLE_SET_HTD</a></pre>
 </li>
 </ul>
 <a name="TABLE_SPLIT">
@@ -255,7 +255,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TABLE_SPLIT</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.616">TABLE_SPLIT</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.618">TABLE_SPLIT</a></pre>
 </li>
 </ul>
 </li>
@@ -272,7 +272,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a>[]&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.610">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a>[]&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.612">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -289,7 +289,7 @@ for (HConstants.Modify c : HConstants.Modify.values())
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.610">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.612">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html b/devapidocs/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
index 22b729f..7eb72e5 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
@@ -109,7 +109,7 @@
 <hr>
 <br>
 <pre><a href="../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Private</a>
-public static enum <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.88">HConstants.OperationStatusCode</a>
+public static enum <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.90">HConstants.OperationStatusCode</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a>&gt;</pre>
 <div class="block">Status codes used for return values of bulk operations.</div>
 </li>
@@ -207,7 +207,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_RUN</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.89">NOT_RUN</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.91">NOT_RUN</a></pre>
 </li>
 </ul>
 <a name="SUCCESS">
@@ -216,7 +216,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>SUCCESS</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.90">SUCCESS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.92">SUCCESS</a></pre>
 </li>
 </ul>
 <a name="BAD_FAMILY">
@@ -225,7 +225,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>BAD_FAMILY</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.91">BAD_FAMILY</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.93">BAD_FAMILY</a></pre>
 </li>
 </ul>
 <a name="SANITY_CHECK_FAILURE">
@@ -234,7 +234,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>SANITY_CHECK_FAILURE</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.92">SANITY_CHECK_FAILURE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.94">SANITY_CHECK_FAILURE</a></pre>
 </li>
 </ul>
 <a name="FAILURE">
@@ -243,7 +243,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FAILURE</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.93">FAILURE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.95">FAILURE</a></pre>
 </li>
 </ul>
 </li>
@@ -260,7 +260,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a>[]&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.88">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a>[]&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.90">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -277,7 +277,7 @@ for (HConstants.OperationStatusCode c : HConstants.OperationStatusCode.values())
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.88">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.90">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 


[49/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
----------------------------------------------------------------------
diff --git a/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html b/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
index f4600f2..206dbca 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html
@@ -306,7 +306,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter">CompareFilter.CompareOp</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html#line.111">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter">CompareFilter.CompareOp</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html#line.118">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -323,7 +323,7 @@ for (CompareFilter.CompareOp c : CompareFilter.CompareOp.values())
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter">CompareFilter.CompareOp</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html#line.111">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter">CompareFilter.CompareOp</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html#line.118">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/apidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
----------------------------------------------------------------------
diff --git a/apidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html b/apidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
index 57979da..ae0edb8 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
@@ -106,7 +106,7 @@
 <br>
 <pre>@InterfaceAudience.Public
 @InterfaceStability.Stable
-public abstract class <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.101">TableInputFormatBase</a>
+public abstract class <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.100">TableInputFormatBase</a>
 extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a>,<a href="../../../../../org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a>&gt;</pre>
 <div class="block">A base for <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableInputFormat.html" title="class in org.apache.hadoop.hbase.mapreduce"><code>TableInputFormat</code></a>s. Receives a <a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client"><code>Connection</code></a>, a <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase"><code>TableName</code></a>,
  an <a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client"><code>Scan</code></a> instance that defines the input columns etc. Subclasses may use
@@ -325,7 +325,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>MAPREDUCE_INPUT_AUTOBALANCE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.105">MAPREDUCE_INPUT_AUTOBALANCE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.104">MAPREDUCE_INPUT_AUTOBALANCE</a></pre>
 <div class="block">Specify if we enable auto-balance for input in M/R jobs.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.MAPREDUCE_INPUT_AUTOBALANCE">Constant Field Values</a></dd></dl>
 </li>
@@ -336,7 +336,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>INPUT_AUTOBALANCE_MAXSKEWRATIO</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.108">INPUT_AUTOBALANCE_MAXSKEWRATIO</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.107">INPUT_AUTOBALANCE_MAXSKEWRATIO</a></pre>
 <div class="block">Specify if ratio for data skew in M/R jobs, it goes well with the enabling hbase.mapreduce
  .input.autobalance property.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.INPUT_AUTOBALANCE_MAXSKEWRATIO">Constant Field Values</a></dd></dl>
@@ -348,7 +348,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TABLE_ROW_TEXTKEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.112">TABLE_ROW_TEXTKEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.111">TABLE_ROW_TEXTKEY</a></pre>
 <div class="block">Specify if the row key in table is text (ASCII between 32~126),
  default is true. False means the table is using binary row key</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.TABLE_ROW_TEXTKEY">Constant Field Values</a></dd></dl>
@@ -368,7 +368,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TableInputFormatBase</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.101">TableInputFormatBase</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.100">TableInputFormatBase</a>()</pre>
 </li>
 </ul>
 </li>
@@ -385,7 +385,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>createRecordReader</h4>
-<pre>public&nbsp;org.apache.hadoop.mapreduce.RecordReader&lt;<a href="../../../../../org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a>,<a href="../../../../../org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.156">createRecordReader</a>(org.apache.hadoop.mapreduce.InputSplit&nbsp;split,
+<pre>public&nbsp;org.apache.hadoop.mapreduce.RecordReader&lt;<a href="../../../../../org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a>,<a href="../../../../../org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.155">createRecordReader</a>(org.apache.hadoop.mapreduce.InputSplit&nbsp;split,
                                                                                          org.apache.hadoop.mapreduce.TaskAttemptContext&nbsp;context)
                                                                                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Builds a <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce"><code>TableRecordReader</code></a>. If no <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce"><code>TableRecordReader</code></a> was provided, uses
@@ -407,7 +407,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>getStartEndKeys</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;byte[][],byte[][]&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.217">getStartEndKeys</a>()
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;byte[][],byte[][]&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.216">getStartEndKeys</a>()
                                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
@@ -419,7 +419,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>getSplits</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.mapreduce.InputSplit&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.232">getSplits</a>(org.apache.hadoop.mapreduce.JobContext&nbsp;context)
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.mapreduce.InputSplit&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.231">getSplits</a>(org.apache.hadoop.mapreduce.JobContext&nbsp;context)
                                                        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Calculates the splits that will serve as input for the map tasks. The
  number of splits matches the number of regions in a table.</div>
@@ -439,7 +439,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>includeRegionInSplit</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.545">includeRegionInSplit</a>(byte[]&nbsp;startKey,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.544">includeRegionInSplit</a>(byte[]&nbsp;startKey,
                            byte[]&nbsp;endKey)</pre>
 <div class="block">Test if the given region is to be included in the InputSplit while splitting
  the regions of a table.
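
A hedged sketch of how a subclass might use this extension point to keep only regions overlapping a fixed key range; the class name and the bounds are hypothetical, and only the overridden method signature comes from the API above.

import org.apache.hadoop.hbase.mapreduce.TableInputFormatBase;
import org.apache.hadoop.hbase.util.Bytes;

public abstract class RangeLimitedTableInputFormat extends TableInputFormatBase {
  // Hypothetical bounds; in HBase an empty region key means "unbounded".
  private final byte[] lowerBound = Bytes.toBytes("row-0100");
  private final byte[] upperBound = Bytes.toBytes("row-0900");

  @Override
  protected boolean includeRegionInSplit(byte[] startKey, byte[] endKey) {
    // Skip regions that end at or before the lower bound (empty end key = last region).
    if (endKey.length > 0 && Bytes.compareTo(endKey, lowerBound) <= 0) {
      return false;
    }
    // Skip regions that start at or after the upper bound.
    if (Bytes.compareTo(startKey, upperBound) >= 0) {
      return false;
    }
    return true;
  }
}
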
@@ -463,7 +463,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionLocator</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.552">getRegionLocator</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.551">getRegionLocator</a>()</pre>
 <div class="block">Allows subclasses to get the <a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client"><code>RegionLocator</code></a>.</div>
 </li>
 </ul>
@@ -473,7 +473,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>getTable</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.562">getTable</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.561">getTable</a>()</pre>
 <div class="block">Allows subclasses to get the <a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client"><code>Table</code></a>.</div>
 </li>
 </ul>
@@ -483,7 +483,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>getAdmin</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.572">getAdmin</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.571">getAdmin</a>()</pre>
 <div class="block">Allows subclasses to get the <a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client"><code>Admin</code></a>.</div>
 </li>
 </ul>
@@ -493,7 +493,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>initializeTable</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.586">initializeTable</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;connection,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.585">initializeTable</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;connection,
                    <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Allows subclasses to initialize the table information.</div>
@@ -508,7 +508,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>getScan</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.602">getScan</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.601">getScan</a>()</pre>
 <div class="block">Gets the scan defining the actual details like columns etc.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>The internal scan instance.</dd></dl>
 </li>
@@ -519,7 +519,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>setScan</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.612">setScan</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.611">setScan</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan)</pre>
 <div class="block">Sets the scan defining the actual details like columns etc.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>scan</code> - The scan to set.</dd></dl>
 </li>
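
A small sketch of the usual way the scan is configured before splits are computed; the column family, qualifier, and caching value are illustrative assumptions.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableInputFormatBase;
import org.apache.hadoop.hbase.util.Bytes;

public final class ScanSetupExample {
  // Configures the scan on a TableInputFormatBase subclass instance supplied by the caller.
  static void applyScan(TableInputFormatBase inputFormat) {
    Scan scan = new Scan();
    scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q")); // illustrative family/qualifier
    scan.setCaching(500);          // rows fetched per RPC; tuning value is an assumption
    scan.setCacheBlocks(false);    // full scans from MR jobs usually bypass the block cache
    inputFormat.setScan(scan);
  }
}
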
@@ -530,7 +530,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>setTableRecordReader</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.622">setTableRecordReader</a>(<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce">TableRecordReader</a>&nbsp;tableRecordReader)</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.621">setTableRecordReader</a>(<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce">TableRecordReader</a>&nbsp;tableRecordReader)</pre>
 <div class="block">Allows subclasses to set the <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce"><code>TableRecordReader</code></a>.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>tableRecordReader</code> - A different <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce"><code>TableRecordReader</code></a>
    implementation.</dd></dl>
@@ -542,7 +542,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>initialize</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.641">initialize</a>(org.apache.hadoop.mapreduce.JobContext&nbsp;context)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.640">initialize</a>(org.apache.hadoop.mapreduce.JobContext&nbsp;context)
                    throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Handle subclass specific set up.
  Each of the entry points used by the MapReduce framework,
@@ -566,7 +566,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockListLast">
 <li class="blockList">
 <h4>closeTable</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.650">closeTable</a>()
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.649">closeTable</a>()
                    throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Close the Table and related objects that were initialized via
  <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#initializeTable(org.apache.hadoop.hbase.client.Connection,%20org.apache.hadoop.hbase.TableName)"><code>initializeTable(Connection, TableName)</code></a>.</div>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
index 8fd15a0..da22771 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
@@ -186,741 +186,742 @@
 <span class="sourceLineNo">178</span>   * The number of bytes per checksum.<a name="line.178"></a>
 <span class="sourceLineNo">179</span>   */<a name="line.179"></a>
 <span class="sourceLineNo">180</span>  public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  // For measuring number of checksum failures<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  static final Counter checksumFailures = new Counter();<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  // for test purpose<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  public static final Counter dataBlockReadCnt = new Counter();<a name="line.185"></a>
-<span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /**<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * Number of checksum verification failures. It also<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * clears the counter.<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  public static final long getChecksumFailuresCount() {<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    long count = checksumFailures.get();<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    checksumFailures.set(0);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    return count;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /** API required to write an {@link HFile} */<a name="line.197"></a>
-<span class="sourceLineNo">198</span>  public interface Writer extends Closeable {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>    /** Add an element to the file info map. */<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>    void append(Cell cell) throws IOException;<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>    /** @return the path to this {@link HFile} */<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    Path getPath();<a name="line.208"></a>
-<span class="sourceLineNo">209</span><a name="line.209"></a>
-<span class="sourceLineNo">210</span>    /**<a name="line.210"></a>
-<span class="sourceLineNo">211</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.211"></a>
-<span class="sourceLineNo">212</span>     * a compound Bloom filter writer.<a name="line.212"></a>
-<span class="sourceLineNo">213</span>     */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    /**<a name="line.223"></a>
-<span class="sourceLineNo">224</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.224"></a>
-<span class="sourceLineNo">225</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.225"></a>
-<span class="sourceLineNo">226</span>     * in HFile version 1 and version 2.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>     */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>    /**<a name="line.230"></a>
-<span class="sourceLineNo">231</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.231"></a>
-<span class="sourceLineNo">232</span>     * HFile V2.<a name="line.232"></a>
-<span class="sourceLineNo">233</span>     */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>    /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>     * Return the file context for the HFile this writer belongs to<a name="line.237"></a>
-<span class="sourceLineNo">238</span>     */<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    HFileContext getFileContext();<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * we want to be able to swap writer implementations.<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  public static class WriterFactory {<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    protected final Configuration conf;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    protected final CacheConfig cacheConf;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    protected FileSystem fs;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    protected Path path;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    protected FSDataOutputStream ostream;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    protected CellComparator comparator = <a name="line.252"></a>
-<span class="sourceLineNo">253</span>        CellComparator.COMPARATOR;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    protected InetSocketAddress[] favoredNodes;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    private HFileContext fileContext;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    protected boolean shouldDropBehind = false;<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      this.conf = conf;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      this.cacheConf = cacheConf;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span><a name="line.262"></a>
-<span class="sourceLineNo">263</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      Preconditions.checkNotNull(fs);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(path);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      this.fs = fs;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      this.path = path;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return this;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      Preconditions.checkNotNull(ostream);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      this.ostream = ostream;<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return this;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      Preconditions.checkNotNull(comparator);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      this.comparator = comparator;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      return this;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    }<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      // Deliberately not checking for null here.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      this.favoredNodes = favoredNodes;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return this;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      this.fileContext = fileContext;<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      return this;<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>      return this;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>  // For measuring number of checksum failures<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  static final Counter CHECKSUM_FAILURES = new Counter();<a name="line.183"></a>
+<span class="sourceLineNo">184</span><a name="line.184"></a>
+<span class="sourceLineNo">185</span>  // For tests. Gets incremented when we read a block whether from HDFS or from Cache.<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  public static final Counter DATABLOCK_READ_COUNT = new Counter();<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /**<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * Number of checksum verification failures. It also<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   * clears the counter.<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   */<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public static final long getChecksumFailuresCount() {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    long count = CHECKSUM_FAILURES.get();<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    CHECKSUM_FAILURES.set(0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    return count;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>  /** API required to write an {@link HFile} */<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  public interface Writer extends Closeable {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>    /** Add an element to the file info map. */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>    void append(Cell cell) throws IOException;<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>    /** @return the path to this {@link HFile} */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    Path getPath();<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>    /**<a name="line.211"></a>
+<span class="sourceLineNo">212</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.212"></a>
+<span class="sourceLineNo">213</span>     * a compound Bloom filter writer.<a name="line.213"></a>
+<span class="sourceLineNo">214</span>     */<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.215"></a>
+<span class="sourceLineNo">216</span><a name="line.216"></a>
+<span class="sourceLineNo">217</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>    /**<a name="line.224"></a>
+<span class="sourceLineNo">225</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.225"></a>
+<span class="sourceLineNo">226</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.226"></a>
+<span class="sourceLineNo">227</span>     * in HFile version 1 and version 2.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>     */<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    /**<a name="line.231"></a>
+<span class="sourceLineNo">232</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.232"></a>
+<span class="sourceLineNo">233</span>     * HFile V2.<a name="line.233"></a>
+<span class="sourceLineNo">234</span>     */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    /**<a name="line.237"></a>
+<span class="sourceLineNo">238</span>     * Return the file context for the HFile this writer belongs to<a name="line.238"></a>
+<span class="sourceLineNo">239</span>     */<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    HFileContext getFileContext();<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /**<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * we want to be able to swap writer implementations.<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  public static class WriterFactory {<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    protected final Configuration conf;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    protected final CacheConfig cacheConf;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    protected FileSystem fs;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    protected Path path;<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    protected FSDataOutputStream ostream;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    protected CellComparator comparator = <a name="line.253"></a>
+<span class="sourceLineNo">254</span>        CellComparator.COMPARATOR;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    protected InetSocketAddress[] favoredNodes;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    private HFileContext fileContext;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    protected boolean shouldDropBehind = false;<a name="line.257"></a>
+<span class="sourceLineNo">258</span><a name="line.258"></a>
+<span class="sourceLineNo">259</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      this.conf = conf;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      this.cacheConf = cacheConf;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(fs);<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      Preconditions.checkNotNull(path);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      this.fs = fs;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      this.path = path;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      return this;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    }<a name="line.270"></a>
+<span class="sourceLineNo">271</span><a name="line.271"></a>
+<span class="sourceLineNo">272</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      Preconditions.checkNotNull(ostream);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>      this.ostream = ostream;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      return this;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    }<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      Preconditions.checkNotNull(comparator);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      this.comparator = comparator;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      return this;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      // Deliberately not checking for null here.<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      this.favoredNodes = favoredNodes;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      return this;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      this.fileContext = fileContext;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      return this;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      return this;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    }<a name="line.298"></a>
 <span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    public Writer create() throws IOException {<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.302"></a>
-<span class="sourceLineNo">303</span>            "filesystem/path or path");<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      if (path != null) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        try {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        } catch (UnsupportedOperationException uoe) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  }<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  /** The configuration key for HFile version to use for new files */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static int getFormatVersion(Configuration conf) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    checkFormatVersion(version);<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    return version;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  }<a name="line.325"></a>
-<span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  /**<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * Disables block cache access for all writers created through the<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * returned factory.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.332"></a>
-<span class="sourceLineNo">333</span>       conf) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Configuration tempConf = new Configuration(conf);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  }<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  /**<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      CacheConfig cacheConf) {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    int version = getFormatVersion(conf);<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    switch (version) {<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    case 2:<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.347"></a>
-<span class="sourceLineNo">348</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        "in hbase-site.xml)");<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    case 3:<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    default:<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.354"></a>
-<span class="sourceLineNo">355</span>          "format version " + version);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    }<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  /**<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * An abstraction used by the block index.<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  public interface CachingBlockReader {<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    /**<a name="line.365"></a>
-<span class="sourceLineNo">366</span>     * Read in a file block.<a name="line.366"></a>
-<span class="sourceLineNo">367</span>     * @param offset offset to read.<a name="line.367"></a>
-<span class="sourceLineNo">368</span>     * @param onDiskBlockSize size of the block<a name="line.368"></a>
-<span class="sourceLineNo">369</span>     * @param cacheBlock<a name="line.369"></a>
-<span class="sourceLineNo">370</span>     * @param pread<a name="line.370"></a>
-<span class="sourceLineNo">371</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.371"></a>
-<span class="sourceLineNo">372</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.372"></a>
-<span class="sourceLineNo">373</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.373"></a>
-<span class="sourceLineNo">374</span>     *  caching efficiency of encoded data blocks)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.375"></a>
-<span class="sourceLineNo">376</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.376"></a>
-<span class="sourceLineNo">377</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.377"></a>
-<span class="sourceLineNo">378</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>     * @return Block wrapped in a ByteBuffer.<a name="line.379"></a>
-<span class="sourceLineNo">380</span>     * @throws IOException<a name="line.380"></a>
-<span class="sourceLineNo">381</span>     */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.382"></a>
-<span class="sourceLineNo">383</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.385"></a>
-<span class="sourceLineNo">386</span>        throws IOException;<a name="line.386"></a>
-<span class="sourceLineNo">387</span><a name="line.387"></a>
-<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
-<span class="sourceLineNo">389</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.389"></a>
-<span class="sourceLineNo">390</span>     * @param block Block to be returned.<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     */<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    void returnBlock(HFileBlock block);<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  }<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    /**<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * write.<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     */<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    String getName();<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>    CellComparator getComparator();<a name="line.404"></a>
-<span class="sourceLineNo">405</span><a name="line.405"></a>
-<span class="sourceLineNo">406</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    Cell getLastKey();<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>    Cell midkey() throws IOException;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>    long length();<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>    long getEntries();<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>    Cell getFirstKey();<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    long indexSize();<a name="line.422"></a>
-<span class="sourceLineNo">423</span><a name="line.423"></a>
-<span class="sourceLineNo">424</span>    byte[] getFirstRowKey();<a name="line.424"></a>
-<span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>    byte[] getLastRowKey();<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    FixedFileTrailer getTrailer();<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>    /**<a name="line.436"></a>
-<span class="sourceLineNo">437</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.437"></a>
-<span class="sourceLineNo">438</span>     * {@link HFile} version.<a name="line.438"></a>
-<span class="sourceLineNo">439</span>     * Knows nothing about how that metadata is structured.<a name="line.439"></a>
-<span class="sourceLineNo">440</span>     */<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * {@link HFile}  version.<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * Knows nothing about how that metadata is structured.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     */<a name="line.447"></a>
-<span class="sourceLineNo">448</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.448"></a>
-<span class="sourceLineNo">449</span><a name="line.449"></a>
-<span class="sourceLineNo">450</span>    Path getPath();<a name="line.450"></a>
-<span class="sourceLineNo">451</span><a name="line.451"></a>
-<span class="sourceLineNo">452</span>    /** Close method with optional evictOnClose */<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    void close(boolean evictOnClose) throws IOException;<a name="line.453"></a>
-<span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.455"></a>
-<span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    boolean hasMVCCInfo();<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>     * Return the file context of the HFile this reader belongs to<a name="line.460"></a>
-<span class="sourceLineNo">461</span>     */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    HFileContext getFileContext();<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    <a name="line.463"></a>
-<span class="sourceLineNo">464</span>    boolean isPrimaryReplicaReader();<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    <a name="line.465"></a>
-<span class="sourceLineNo">466</span>    void setPrimaryReplicaReader(boolean isPrimaryReplicaReader);<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    boolean shouldIncludeMemstoreTS();<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>    boolean isDecodeMemstoreTS();<a name="line.470"></a>
-<span class="sourceLineNo">471</span><a name="line.471"></a>
-<span class="sourceLineNo">472</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>    @VisibleForTesting<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    @VisibleForTesting<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    boolean prefetchComplete();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>  }<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  /**<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * Method returns the reader given the specified arguments.<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   *<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * @param path hfile's path<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * @param fsdis stream of path's file<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * @param size max size of the trailer.<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * @param hfs<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * @return an appropriate instance of HFileReader<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   */<a name="line.492"></a>
-<span class="sourceLineNo">493</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      justification="Intentional")<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  private static Reader pickReaderVersion(Path path, FSDataInputStreamWrapper fsdis,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      long size, CacheConfig cacheConf, HFileSystem hfs, Configuration conf) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FixedFileTrailer trailer = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    try {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      switch (trailer.getMajorVersion()) {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      case 2:<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        LOG.debug("Opening HFile v2 with v3 reader");<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      case 3 :<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs, conf);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      default:<a name="line.508"></a>
-<span class="sourceLineNo">509</span>        throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (Throwable t) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      try {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        fsdis.close();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      } catch (Throwable t2) {<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper", t2);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    }<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  /**<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   * @param fs A file system<a name="line.522"></a>
-<span class="sourceLineNo">523</span>   * @param path Path to HFile<a name="line.523"></a>
-<span class="sourceLineNo">524</span>   * @param fsdis a stream of path's file<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * @param size max size of the trailer.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.526"></a>
-<span class="sourceLineNo">527</span>   * @param conf Configuration<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * @return A version specific Hfile Reader<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  @SuppressWarnings("resource")<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  public static Reader createReader(FileSystem fs, Path path,<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      FSDataInputStreamWrapper fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      throws IOException {<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    HFileSystem hfs = null;<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    // the filesystem.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    if (!(fs instanceof HFileSystem)) {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      hfs = new HFileSystem(fs);<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } else {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      hfs = (HFileSystem)fs;<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    return pickReaderVersion(path, fsdis, size, cacheConf, hfs, conf);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   *<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * @param fs filesystem<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * @param path Path to file to read<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * @param cacheConf This must not be null.  @see {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * @return an active Reader instance<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   */<a name="line.556"></a>
-<span class="sourceLineNo">557</span>  public static Reader createReader(<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      FileSystem fs, Path path, CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    return pickReaderVersion(path, stream, fs.getFileStatus(path).getLen(),<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      cacheConf, stream.getHfs(), conf);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>  }<a name="line.563"></a>
-<span class="sourceLineNo">564</span><a name="line.564"></a>
-<span class="sourceLineNo">565</span>  /**<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * This factory method is used only by unit tests<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  static Reader createReaderFromStream(Path path,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      FSDataInputStream fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    return pickReaderVersion(path, wrapper, size, cacheConf, null, conf);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  /**<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   * @param fs filesystem<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   * @param path Path to file to verify<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.579"></a>
-<span class="sourceLineNo">580</span>   * @throws IOException if failed to read from the underlying stream<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   */<a name="line.581"></a>
-<span class="sourceLineNo">582</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>  /**<a name="line.586"></a>
-<span class="sourceLineNo">587</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.587"></a>
-<span class="sourceLineNo">588</span>   * @param fs filesystem<a name="line.588"></a>
-<span class="sourceLineNo">589</span>   * @param fileStatus the file to verify<a name="line.589"></a>
-<span class="sourceLineNo">590</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * @throws IOException if failed to read from the underlying stream<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   */<a name="line.592"></a>
-<span class="sourceLineNo">593</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      throws IOException {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    final Path path = fileStatus.getPath();<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    final long size = fileStatus.getLen();<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    try {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.599"></a>
-<span class="sourceLineNo">600</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      return true;<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    } catch (IllegalArgumentException e) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      return false;<a name="line.604"></a>
-<span class="sourceLineNo">605</span>    } catch (IOException e) {<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      throw e;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } finally {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      try {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        fsdis.close();<a name="line.609"></a>
-<span class="sourceLineNo">610</span>      } catch (Throwable t) {<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper: " + path, t);<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      }<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>  }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>  /**<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;byte [], byte []&gt;(Bytes.BYTES_COMPARATOR);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    public FileInfo() {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>      super();<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    }<a name="line.633"></a>
-<span class="sourceLineNo">634</span><a name="line.634"></a>
-<span class="sourceLineNo">635</span>    /**<a name="line.635"></a>
-<span class="sourceLineNo">636</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.636"></a>
-<span class="sourceLineNo">637</span>     * key prefix.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>     *<a name="line.638"></a>
-<span class="sourceLineNo">639</span>     * @param k key to add<a name="line.639"></a>
-<span class="sourceLineNo">640</span>     * @param v value to add<a name="line.640"></a>
-<span class="sourceLineNo">641</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.641"></a>
-<span class="sourceLineNo">642</span>     *          with the reserved prefix<a name="line.642"></a>
-<span class="sourceLineNo">643</span>     * @return this file info object<a name="line.643"></a>
-<span class="sourceLineNo">644</span>     * @throws IOException if the key or value is invalid<a name="line.644"></a>
-<span class="sourceLineNo">645</span>     */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        final boolean checkPrefix) throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      if (k == null || v == null) {<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        throw new NullPointerException("Key nor value may be null");<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      }<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.651"></a>
-<span class="sourceLineNo">652</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.652"></a>
-<span class="sourceLineNo">653</span>            + " are reserved");<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      put(k, v);<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return this;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span><a name="line.658"></a>
-<span class="sourceLineNo">659</span>    public void clear() {<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      this.map.clear();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span><a name="line.662"></a>
-<span class="sourceLineNo">663</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>      return map.comparator();<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    }<a name="line.665"></a>
-<span class="sourceLineNo">666</span><a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public boolean containsKey(Object key) {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      return map.containsKey(key);<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    }<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    public boolean containsValue(Object value) {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      return map.containsValue(value);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    }<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      return map.entrySet();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    public boolean equals(Object o) {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      return map.equals(o);<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span><a name="line.682"></a>
-<span class="sourceLineNo">683</span>    public byte[] firstKey() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      return map.firstKey();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    }<a name="line.685"></a>
-<span class="sourceLineNo">686</span><a name="line.686"></a>
-<span class="sourceLineNo">687</span>    public byte[] get(Object key) {<a name="line.687"></a>
-<span class="sourceLineNo">688</span>      return map.get(key);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>    }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>    public int hashCode() {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      return map.hashCode();<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span><a name="line.694"></a>
-<span class="sourceLineNo">695</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>      return this.map.headMap(toKey);<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    }<a name="line.697"></a>
-<span class="sourceLineNo">698</span><a name="line.698"></a>
-<span class="sourceLineNo">699</span>    public boolean isEmpty() {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return map.isEmpty();<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      return map.keySet();<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>    public byte[] lastKey() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>      return map.lastKey();<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    }<a name="line.709"></a>
-<span class="sourceLineNo">710</span><a name="line.710"></a>
-<span class="sourceLineNo">711</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      return this.map.put(key, value);<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    }<a name="line.713"></a>
-<span class="sourceLineNo">714</span><a name="line.714"></a>
-<span class="sourceLineNo">715</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.map.putAll(m);<a name="line.716"></a>
-<span class="sourceLineNo">717</span>    }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public byte[] remove(Object key) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return this.map.remove(key);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    public int size() {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>      return map.size();<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    }<a name="line.725"></a>
-<span class="sourceLineNo">726</span><a name="line.726"></a>
-<span class="sourceLineNo">727</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return this.map.subMap(fromKey, toKey);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.731"></a>
-<span class="sourceLineNo">732</span>      return this.map.tailMap(fromKey);<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    }<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>    public Collection&lt;byte[]&gt; values() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      return map.values();<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>    /**<a name="line.739"></a>
-<span class="sourceLineNo">740</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.740"></a>
-<span class="sourceLineNo">741</span>     * We write it as a protobuf.<a name="line.741"></a>
-<span class="sourceLineNo">742</span>     * @param out<a name="line.742"></a>
-<span class="sourceLineNo">743</span>     * @throws IOException<a name="line.743"></a>
-<span class="sourceLineNo">744</span>     * @see #read(DataInputStream)<a name="line.744"></a>
-<span class="sourceLineNo">745</span>     */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    void write(final DataOutputStream out) throws IOException {<a name="line.746"></a>
-<span class="sourceLineNo">747</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.749"></a>
-<span class="sourceLineNo">750</span>        bbpBuilder.setFirst(ByteStringer.wrap(e.getKey()));<a name="line.750"></a>
-<span class="sourceLineNo">751</span>        bbpBuilder.setSecond(ByteStringer.wrap(e.getValue()));<a name="line.751"></a>
-<span class="sourceLineNo">752</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      }<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      builder.build().writeDelimitedTo(out);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    /**<a name="line.758"></a>
-<span class="sourceLineNo">759</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.759"></a>
-<span class="sourceLineNo">760</span>     * Can deserialize protobuf of old Writables format.<a name="line.760"></a>
-<span class="sourceLineNo">761</span>     * @param in<a name="line.761"></a>
-<span class="sourceLineNo">762</span>     * @throws IOException<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * @see #write(DataOutputStream)<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    void read(final DataInputStream in) throws IOException {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      byte [] pbuf = new byte[pblen];<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (in.markSupported()) in.mark(pblen);<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      int read = in.read(pbuf);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      } else {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>        if (in.markSupported()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>          in.reset();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>          parseWritable(in);<a name="line.777"></a>
-<span class="sourceLineNo">778</span>        } else {<a name="line.778"></a>
-<span class="sourceLineNo">779</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.779"></a>
-<span class="sourceLineNo">780</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.781"></a>
-<span class="sourceLineNo">782</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.782"></a>
-<span class="sourceLineNo">783</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.783"></a>
-<span class="sourceLineNo">784</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.784"></a>
-<span class="sourceLineNo">785</span>          parseWritable(new DataInputStream(sis));<a name="line.785"></a>
-<span class="sourceLineNo">786</span>        }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>      }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    }<a name="line.788"></a>
-<span class="sourceLineNo">789</span><a name="line.789"></a>
-<span class="sourceLineNo">790</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.790"></a>
-<span class="sourceLineNo">791</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.791"></a>
-<span class="sourceLineNo">792</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.792"></a>
-<span class="sourceLineNo">793</span>     * @throws IOException<a name="line.793"></a>
-<span class="sourceLineNo">794</span>     */<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      this.map.clear();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>      // Read the number of entries in the map<a name="line.798"></a>
-<span class="sourceLineNo">799</span>      int entries = in.readInt();<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      // Then read each key/value pair<a name="line.800"></a>
-<span class="sourceLineNo">801</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        byte [] key = Bytes.readByteArray(in);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        in.readByte();<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        byte [] value = Bytes.readByteArray(in);<a name="line.805"></a>
-<span class="sourceLineNo">806</span>        this.map.put(key, value);<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      }<a name="line.807"></a>
-<span class="sourceLineNo">808</span>    }<a name="line.808"></a>
-<span class="sourceLineNo">809</span><a name="line.809"></a>
-<span class="sourceLineNo">810</span>    /**<a name="line.810"></a>
-<span class="sourceLineNo">811</span>     * Fill our map with content of the pb we read off disk<a name="line.811"></a>
-<span class="sourceLineNo">812</span>     * @param fip protobuf message to read<a name="line.812"></a>
-<span class="sourceLineNo">813</span>     */<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      this.map.clear();<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    }<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.822"></a>
-<span class="sourceLineNo">823</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.824"></a>
-<span class="sourceLineNo">825</span>  }<a name="line.825"></a>
-<span class="sourceLineNo">826</span><a name="line.826"></a>
-<span class="sourceLineNo">827</span>  /**<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * HFile.Writer.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   *<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * @return Array of strings, each represents a supported compression<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   *         supported.<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   *         &lt;ul&gt;<a name="line.834"></a>
-<span class="sourceLineNo">835</span>   *         &lt;li&gt;"none" - No compression.<a name="line.835"></a>
-<span class="sourceLineNo">836</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.836"></a>
-<span class="sourceLineNo">837</span>   *         &lt;/ul&gt;<a name="line.837"></a>
-<span class="sourceLineNo">838</span>   */<a name="line.838"></a>
-<span class="sourceLineNo">839</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    return Compression.getSupportedAlgorithms();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
-<span class="sourceLineNo">842</span><a name="line.842"></a>
-<span class="sourceLineNo">843</span>  // Utility methods.<a name="line.843"></a>
-<span class="sourceLineNo">844</span>  /*<a name="line.844"></a>
-<span class="sourceLineNo">845</span>   * @param l Long to convert to an int.<a name="line.845"></a>
-<span class="sourceLineNo">846</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.846"></a>
-<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
-<span class="sourceLineNo">848</span>  static int longToInt(final long l) {<a name="line.848"></a>
-<span class="sourceLineNo">849</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.851"></a>
-<span class="sourceLineNo">852</span>  }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>  /**<a name="line.854"></a>
-<span class="sourceLineNo">855</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.855"></a>
-<span class="sourceLineNo">856</span>   * empty list.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>   *<a name="line.857"></a>
-<span class="sourceLineNo">858</span>   * @param fs  The file system reference.<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * @param regionDir  The region directory to scan.<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * @return The list of files found.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * @throws IOException When scanning the files fails.<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   */<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      throws IOException {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;Path&gt;();<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    for(FileStatus dir : familyDirs) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.869"></a>
-<span class="sourceLineNo">870</span>      for (FileStatus file : files) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        if (!file.isDirectory() &amp;&amp;<a name="line.871"></a>
-<span class="sourceLineNo">872</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          regionHFiles.add(file.getPath());<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>      }<a name="line.876"></a>
-<span class="sourceLineNo">877</span>    }<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    return regionHFiles;<a name="line.878"></a>
-<span class="sourceLineNo">879</span>  }<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>  /**<a name="line.881"></a>
-<span class="sourceLineNo">882</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.882"></a>
-<span class="sourceLineNo">883</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.883"></a>
-<span class="sourceLineNo">884</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.884"></a>
-<span class="sourceLineNo">885</span>   * indicate that this is not a software error, but corrupted input.<a name="line.885"></a>
-<span class="sourceLineNo">886</span>   *<a name="line.886"></a>
-<span class="sourceLineNo">887</span>   * @param version an HFile version<a name="line.887"></a>
-<span class="sourceLineNo">888</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.888"></a>
-<span class="sourceLineNo">889</span>   */<a name="line.889"></a>
-<span class="sourceLineNo">890</span>  public static void checkFormatVersion(int version)<a name="line.890"></a>
-<span class="sourceLineNo">891</span>      throws IllegalArgumentException {<a name="line.891"></a>
-<span class="sourceLineNo">892</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.894"></a>
-<span class="sourceLineNo">895</span>          + MAX_FORMAT_VERSION + ")");<a name="line.895"></a>
-<span class="sourceLineNo">896</span>    }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>  }<a name="line.897"></a>
-<span class="sourceLineNo">898</span><a name="line.898"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>    public Writer create() throws IOException {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.303"></a>
+<span class="sourceLineNo">304</span>            "filesystem/path or path");<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      if (path != null) {<a name="line.306"></a>
+<span class="sourceLineNo">307</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        try {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.309"></a>
+<span class="sourceLineNo">310</span>        } catch (UnsupportedOperationException uoe) {<a name="line.310"></a>
+<span class="sourceLineNo">311</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        }<a name="line.313"></a>
+<span class="sourceLineNo">314</span>      }<a name="line.314"></a>
+<span class="sourceLineNo">315</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    }<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  /** The configuration key for HFile version to use for new files */<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  public static int getFormatVersion(Configuration conf) {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    checkFormatVersion(version);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return version;<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * Disables block cache access for all writers created through the<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * returned factory.<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   */<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.333"></a>
+<span class="sourceLineNo">334</span>       conf) {<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    Configuration tempConf = new Configuration(conf);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  }<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  /**<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.343"></a>
+<span class="sourceLineNo">344</span>      CacheConfig cacheConf) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    int version = getFormatVersion(conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    switch (version) {<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    case 2:<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.350"></a>
+<span class="sourceLineNo">351</span>        "in hbase-site.xml)");<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    case 3:<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    default:<a name="line.354"></a>
+<span class="sourceLineNo">355</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.355"></a>
+<span class="sourceLineNo">356</span>          "format version " + version);<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  /**<a name="line.360"></a>
+<span class="sourceLineNo">361</span>   * An abstraction used by the block index.<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public interface CachingBlockReader {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>    /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>     * Read in a file block.<a name="line.367"></a>
+<span class="sourceLineNo">368</span>     * @param offset offset to read.<a name="line.368"></a>
+<span class="sourceLineNo">369</span>     * @param onDiskBlockSize size of the block<a name="line.369"></a>
+<span class="sourceLineNo">370</span>     * @param cacheBlock<a name="line.370"></a>
+<span class="sourceLineNo">371</span>     * @param pread<a name="line.371"></a>
+<span class="sourceLineNo">372</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.372"></a>
+<span class="sourceLineNo">373</span>     * @param expectedBloc

<TRUNCATED>
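
For anyone reading the HFile.java hunk above, here is a minimal usage sketch of
the reader-side factory methods it documents. Only isHFileFormat(), the
CacheConfig(Configuration) constructor and createReader(fs, path, cacheConf, conf)
appear in the hunk itself; the loadFileInfo() and getScanner(cacheBlocks, pread)
calls are assumptions about the surrounding HFile.Reader API and are marked as
such in the comments.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.io.hfile.HFileScanner;

    public class HFileDumpSketch {
      public static void dump(Configuration conf, Path path) throws IOException {
        FileSystem fs = path.getFileSystem(conf);
        // isHFileFormat() only checks that a valid fixed file trailer can be read.
        if (!HFile.isHFileFormat(fs, path)) {
          System.out.println(path + " is not an HFile");
          return;
        }
        // createReader() requires a non-null CacheConfig and picks a version-specific
        // reader from the trailer (v2 files are opened with the v3 reader).
        CacheConfig cacheConf = new CacheConfig(conf);
        HFile.Reader reader = HFile.createReader(fs, path, cacheConf, conf);
        try {
          reader.loadFileInfo();                                   // assumed API: loads the FileInfo map
          HFileScanner scanner = reader.getScanner(false, false);  // assumed API: (cacheBlocks, pread)
          if (scanner.seekTo()) {                                  // position at the first cell, if any
            do {
              System.out.println(scanner.getCell());
            } while (scanner.next());
          }
        } finally {
          reader.close();
        }
      }
    }

The writer side is symmetric: getWriterFactory(conf, cacheConf), or
getWriterFactoryNoCache(conf) to bypass the block cache, returns a WriterFactory
whose create() enforces that exactly one of filesystem/path or an output stream
was supplied.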

[50/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/apidocs/org/apache/hadoop/hbase/HConstants.html
----------------------------------------------------------------------
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html b/apidocs/org/apache/hadoop/hbase/HConstants.html
index 08d16c3..ab87d96 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -1913,7 +1913,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.71">HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.73">HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM</a></pre>
 <div class="block">The size data structures with minor version is 0</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM">Constant Field Values</a></dd></dl>
 </li>
@@ -1924,7 +1924,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HFILEBLOCK_HEADER_SIZE</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.77">HFILEBLOCK_HEADER_SIZE</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.79">HFILEBLOCK_HEADER_SIZE</a></pre>
 <div class="block">The size of a version 2 HFile block header, minor version 1.
  There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum
  followed by another 4 byte value to store sizeofDataOnDisk.</div>
@@ -1937,7 +1937,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HFILEBLOCK_DUMMY_HEADER</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.80">HFILEBLOCK_DUMMY_HEADER</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.82">HFILEBLOCK_DUMMY_HEADER</a></pre>
 <div class="block">Just an array of bytes of the right size.</div>
 </li>
 </ul>
@@ -1947,7 +1947,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZERO_L</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.97">ZERO_L</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.99">ZERO_L</a></pre>
 <div class="block">long constant for zero</div>
 </li>
 </ul>
@@ -1957,7 +1957,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>NINES</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.98">NINES</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.100">NINES</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.NINES">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -1967,7 +1967,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZEROES</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.99">ZEROES</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.101">ZEROES</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZEROES">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -1977,7 +1977,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>VERSION_FILE_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.104">VERSION_FILE_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.106">VERSION_FILE_NAME</a></pre>
 <div class="block">name of version file</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.VERSION_FILE_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -1988,7 +1988,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>FILE_SYSTEM_VERSION</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.115">FILE_SYSTEM_VERSION</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.117">FILE_SYSTEM_VERSION</a></pre>
 <div class="block">Current version of file system.
  Version 4 supports only one kind of bloom filter.
  Version 5 changes versions in catalog table regions.
@@ -2004,7 +2004,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLUSTER_DISTRIBUTED</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.122">CLUSTER_DISTRIBUTED</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.124">CLUSTER_DISTRIBUTED</a></pre>
 <div class="block">Cluster is in distributed mode or not</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLUSTER_DISTRIBUTED">Constant Field Values</a></dd></dl>
 </li>
@@ -2015,7 +2015,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_MASTER_LOADBALANCER_CLASS</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.125">HBASE_MASTER_LOADBALANCER_CLASS</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.127">HBASE_MASTER_LOADBALANCER_CLASS</a></pre>
 <div class="block">Config for pluggable load balancers</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOADBALANCER_CLASS">Constant Field Values</a></dd></dl>
 </li>
@@ -2026,7 +2026,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_MASTER_LOADBALANCE_BYTABLE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.128">HBASE_MASTER_LOADBALANCE_BYTABLE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.130">HBASE_MASTER_LOADBALANCE_BYTABLE</a></pre>
 <div class="block">Config for balancing the cluster by table</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOADBALANCE_BYTABLE">Constant Field Values</a></dd></dl>
 </li>
@@ -2037,7 +2037,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ENSEMBLE_TABLE_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.131">ENSEMBLE_TABLE_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.133">ENSEMBLE_TABLE_NAME</a></pre>
 <div class="block">The name of the ensemble table</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ENSEMBLE_TABLE_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2048,7 +2048,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_MASTER_NORMALIZER_CLASS</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.134">HBASE_MASTER_NORMALIZER_CLASS</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.136">HBASE_MASTER_NORMALIZER_CLASS</a></pre>
 <div class="block">Config for pluggable region normalizer</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_MASTER_NORMALIZER_CLASS">Constant Field Values</a></dd></dl>
 </li>
@@ -2059,7 +2059,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLUSTER_IS_LOCAL</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.138">CLUSTER_IS_LOCAL</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.140">CLUSTER_IS_LOCAL</a></pre>
 <div class="block">Cluster is standalone or pseudo-distributed</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLUSTER_IS_LOCAL">Constant Field Values</a></dd></dl>
 </li>
@@ -2070,7 +2070,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLUSTER_IS_DISTRIBUTED</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.141">CLUSTER_IS_DISTRIBUTED</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.143">CLUSTER_IS_DISTRIBUTED</a></pre>
 <div class="block">Cluster is fully-distributed</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLUSTER_IS_DISTRIBUTED">Constant Field Values</a></dd></dl>
 </li>
@@ -2081,7 +2081,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CLUSTER_DISTRIBUTED</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.144">DEFAULT_CLUSTER_DISTRIBUTED</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.146">DEFAULT_CLUSTER_DISTRIBUTED</a></pre>
 <div class="block">Default value for cluster distributed mode</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_CLUSTER_DISTRIBUTED">Constant Field Values</a></dd></dl>
 </li>
@@ -2092,7 +2092,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_HOST</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.147">DEFAULT_HOST</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.149">DEFAULT_HOST</a></pre>
 <div class="block">default host address</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_HOST">Constant Field Values</a></dd></dl>
 </li>
@@ -2103,7 +2103,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MASTER_PORT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.150">MASTER_PORT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.152">MASTER_PORT</a></pre>
 <div class="block">Parameter name for port master listens on.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MASTER_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2114,7 +2114,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MASTER_PORT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.153">DEFAULT_MASTER_PORT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.155">DEFAULT_MASTER_PORT</a></pre>
 <div class="block">default port that the master listens on</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_MASTER_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2125,7 +2125,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MASTER_INFOPORT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.156">DEFAULT_MASTER_INFOPORT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.158">DEFAULT_MASTER_INFOPORT</a></pre>
 <div class="block">default port for master web api</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_MASTER_INFOPORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2136,7 +2136,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MASTER_INFO_PORT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.159">MASTER_INFO_PORT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.161">MASTER_INFO_PORT</a></pre>
 <div class="block">Configuration key for master web API port</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MASTER_INFO_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2147,7 +2147,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MASTER_TYPE_BACKUP</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.162">MASTER_TYPE_BACKUP</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.164">MASTER_TYPE_BACKUP</a></pre>
 <div class="block">Parameter name for the master type being backup (waits for primary to go inactive).</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MASTER_TYPE_BACKUP">Constant Field Values</a></dd></dl>
 </li>
@@ -2158,7 +2158,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MASTER_TYPE_BACKUP</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.167">DEFAULT_MASTER_TYPE_BACKUP</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.169">DEFAULT_MASTER_TYPE_BACKUP</a></pre>
 <div class="block">by default every master is a possible primary master unless the conf explicitly overrides it</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_MASTER_TYPE_BACKUP">Constant Field Values</a></dd></dl>
 </li>
@@ -2169,7 +2169,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_QUORUM</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.170">ZOOKEEPER_QUORUM</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.172">ZOOKEEPER_QUORUM</a></pre>
 <div class="block">Name of ZooKeeper quorum configuration parameter.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_QUORUM">Constant Field Values</a></dd></dl>
 </li>
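
 A minimal client-side sketch of how this quorum key is usually supplied (real deployments normally set it in hbase-site.xml); the host names and the class name below are placeholders, not part of the API:

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.hbase.HBaseConfiguration;
     import org.apache.hadoop.hbase.HConstants;

     public class QuorumConfigSketch {
       public static Configuration clientConf() {
         // Start from the hbase-default.xml / hbase-site.xml defaults.
         Configuration conf = HBaseConfiguration.create();
         // Placeholder ensemble; replace with the real quorum host list.
         conf.set(HConstants.ZOOKEEPER_QUORUM, "zk1.example.com,zk2.example.com");
         conf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, HConstants.DEFAULT_ZOOKEPER_CLIENT_PORT);
         return conf;
       }
     }
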
@@ -2180,7 +2180,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZK_CFG_PROPERTY_PREFIX</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.173">ZK_CFG_PROPERTY_PREFIX</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.175">ZK_CFG_PROPERTY_PREFIX</a></pre>
 <div class="block">Common prefix of ZooKeeper configuration properties</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZK_CFG_PROPERTY_PREFIX">Constant Field Values</a></dd></dl>
 </li>
@@ -2191,7 +2191,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZK_CFG_PROPERTY_PREFIX_LEN</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.176">ZK_CFG_PROPERTY_PREFIX_LEN</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.178">ZK_CFG_PROPERTY_PREFIX_LEN</a></pre>
 </li>
 </ul>
 <a name="CLIENT_PORT_STR">
@@ -2200,7 +2200,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLIENT_PORT_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.183">CLIENT_PORT_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.185">CLIENT_PORT_STR</a></pre>
 <div class="block">The ZK client port key in the ZK properties map. The name reflects the
  fact that this is not an HBase configuration key.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLIENT_PORT_STR">Constant Field Values</a></dd></dl>
@@ -2212,7 +2212,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_CLIENT_PORT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.186">ZOOKEEPER_CLIENT_PORT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.188">ZOOKEEPER_CLIENT_PORT</a></pre>
 <div class="block">Parameter name for the client port that the zookeeper listens on</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_CLIENT_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2223,7 +2223,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ZOOKEPER_CLIENT_PORT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.190">DEFAULT_ZOOKEPER_CLIENT_PORT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.192">DEFAULT_ZOOKEPER_CLIENT_PORT</a></pre>
 <div class="block">Default client port that the zookeeper listens on</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_ZOOKEPER_CLIENT_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2234,7 +2234,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_RECOVERABLE_WAITTIME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.195">ZOOKEEPER_RECOVERABLE_WAITTIME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.197">ZOOKEEPER_RECOVERABLE_WAITTIME</a></pre>
 <div class="block">Parameter name for the wait time for the recoverable zookeeper</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_RECOVERABLE_WAITTIME">Constant Field Values</a></dd></dl>
 </li>
@@ -2245,7 +2245,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME</h4>
-<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.199">DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.201">DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME</a></pre>
 <div class="block">Default wait time for the recoverable zookeeper</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME">Constant Field Values</a></dd></dl>
 </li>
@@ -2256,7 +2256,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_ZNODE_PARENT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.202">ZOOKEEPER_ZNODE_PARENT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.204">ZOOKEEPER_ZNODE_PARENT</a></pre>
 <div class="block">Parameter name for the root dir in ZK for this cluster</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_ZNODE_PARENT">Constant Field Values</a></dd></dl>
 </li>
@@ -2267,7 +2267,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ZOOKEEPER_ZNODE_PARENT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.204">DEFAULT_ZOOKEEPER_ZNODE_PARENT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.206">DEFAULT_ZOOKEEPER_ZNODE_PARENT</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2277,7 +2277,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_MAX_CLIENT_CNXNS</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.210">ZOOKEEPER_MAX_CLIENT_CNXNS</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.212">ZOOKEEPER_MAX_CLIENT_CNXNS</a></pre>
 <div class="block">Parameter name for the limit on concurrent client-side zookeeper
  connections</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_MAX_CLIENT_CNXNS">Constant Field Values</a></dd></dl>
@@ -2289,7 +2289,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_DATA_DIR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.214">ZOOKEEPER_DATA_DIR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.216">ZOOKEEPER_DATA_DIR</a></pre>
 <div class="block">Parameter name for the ZK data directory</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_DATA_DIR">Constant Field Values</a></dd></dl>
 </li>
@@ -2300,7 +2300,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_TICK_TIME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.218">ZOOKEEPER_TICK_TIME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.220">ZOOKEEPER_TICK_TIME</a></pre>
 <div class="block">Parameter name for the ZK tick time</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_TICK_TIME">Constant Field Values</a></dd></dl>
 </li>
@@ -2311,7 +2311,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.222">DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.224">DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS</a></pre>
 <div class="block">Default limit on concurrent client-side zookeeper connections</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS">Constant Field Values</a></dd></dl>
 </li>
@@ -2322,7 +2322,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZK_SESSION_TIMEOUT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.225">ZK_SESSION_TIMEOUT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.227">ZK_SESSION_TIMEOUT</a></pre>
 <div class="block">Configuration key for ZooKeeper session timeout</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZK_SESSION_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
@@ -2333,7 +2333,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ZK_SESSION_TIMEOUT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.228">DEFAULT_ZK_SESSION_TIMEOUT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.230">DEFAULT_ZK_SESSION_TIMEOUT</a></pre>
 <div class="block">Default value for ZooKeeper session timeout</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_ZK_SESSION_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
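
 A small sketch of reading the session timeout with its shipped default, assuming a Configuration object is already in hand (the class name is a placeholder):

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.hbase.HConstants;

     public class ZkSessionTimeoutSketch {
       // The configured ZooKeeper session timeout, falling back to the default
       // when the key is absent.
       public static int sessionTimeoutMillis(Configuration conf) {
         return conf.getInt(HConstants.ZK_SESSION_TIMEOUT, HConstants.DEFAULT_ZK_SESSION_TIMEOUT);
       }
     }
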
@@ -2344,7 +2344,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_USEMULTI</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.231">ZOOKEEPER_USEMULTI</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.233">ZOOKEEPER_USEMULTI</a></pre>
 <div class="block">Configuration key for whether to use ZK.multi</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_USEMULTI">Constant Field Values</a></dd></dl>
 </li>
@@ -2355,7 +2355,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>REGIONSERVER_PORT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.234">REGIONSERVER_PORT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.236">REGIONSERVER_PORT</a></pre>
 <div class="block">Parameter name for port region server listens on.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.REGIONSERVER_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2366,7 +2366,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_REGIONSERVER_PORT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.237">DEFAULT_REGIONSERVER_PORT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.239">DEFAULT_REGIONSERVER_PORT</a></pre>
 <div class="block">Default port region server listens on.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_REGIONSERVER_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2377,7 +2377,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_REGIONSERVER_INFOPORT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.240">DEFAULT_REGIONSERVER_INFOPORT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.242">DEFAULT_REGIONSERVER_INFOPORT</a></pre>
 <div class="block">default port for region server web api</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_REGIONSERVER_INFOPORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2388,7 +2388,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>REGIONSERVER_INFO_PORT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.243">REGIONSERVER_INFO_PORT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.245">REGIONSERVER_INFO_PORT</a></pre>
 <div class="block">A configuration key for regionserver info port</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.REGIONSERVER_INFO_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2399,7 +2399,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>REGIONSERVER_INFO_PORT_AUTO</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.247">REGIONSERVER_INFO_PORT_AUTO</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.249">REGIONSERVER_INFO_PORT_AUTO</a></pre>
 <div class="block">A flag that enables automatic selection of regionserver info port</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.REGIONSERVER_INFO_PORT_AUTO">Constant Field Values</a></dd></dl>
 </li>
@@ -2410,7 +2410,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>REGION_SERVER_IMPL</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.251">REGION_SERVER_IMPL</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.253">REGION_SERVER_IMPL</a></pre>
 <div class="block">Parameter name for what region server implementation to use.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.REGION_SERVER_IMPL">Constant Field Values</a></dd></dl>
 </li>
@@ -2421,7 +2421,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MASTER_IMPL</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.254">MASTER_IMPL</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.256">MASTER_IMPL</a></pre>
 <div class="block">Parameter name for what master implementation to use.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MASTER_IMPL">Constant Field Values</a></dd></dl>
 </li>
@@ -2432,7 +2432,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASECLIENT_IMPL</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.257">HBASECLIENT_IMPL</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.259">HBASECLIENT_IMPL</a></pre>
 <div class="block">Parameter name for what hbase client implementation to use.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASECLIENT_IMPL">Constant Field Values</a></dd></dl>
 </li>
@@ -2443,7 +2443,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>THREAD_WAKE_FREQUENCY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.260">THREAD_WAKE_FREQUENCY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.262">THREAD_WAKE_FREQUENCY</a></pre>
 <div class="block">Parameter name for how often threads should wake up</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.THREAD_WAKE_FREQUENCY">Constant Field Values</a></dd></dl>
 </li>
@@ -2454,7 +2454,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_THREAD_WAKE_FREQUENCY</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.263">DEFAULT_THREAD_WAKE_FREQUENCY</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.265">DEFAULT_THREAD_WAKE_FREQUENCY</a></pre>
 <div class="block">Default value for thread wake frequency</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_THREAD_WAKE_FREQUENCY">Constant Field Values</a></dd></dl>
 </li>
@@ -2465,7 +2465,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>VERSION_FILE_WRITE_ATTEMPTS</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.266">VERSION_FILE_WRITE_ATTEMPTS</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.268">VERSION_FILE_WRITE_ATTEMPTS</a></pre>
 <div class="block">Parameter name for how often we should try to write a version file, before failing</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.VERSION_FILE_WRITE_ATTEMPTS">Constant Field Values</a></dd></dl>
 </li>
@@ -2476,7 +2476,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_VERSION_FILE_WRITE_ATTEMPTS</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.269">DEFAULT_VERSION_FILE_WRITE_ATTEMPTS</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.271">DEFAULT_VERSION_FILE_WRITE_ATTEMPTS</a></pre>
 <div class="block">Parameter name for how often we should try to write a version file, before failing</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS">Constant Field Values</a></dd></dl>
 </li>
@@ -2487,7 +2487,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MAJOR_COMPACTION_PERIOD</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.272">MAJOR_COMPACTION_PERIOD</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.274">MAJOR_COMPACTION_PERIOD</a></pre>
 <div class="block">Parameter name for how often a region should should perform a major compaction</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MAJOR_COMPACTION_PERIOD">Constant Field Values</a></dd></dl>
 </li>
@@ -2498,7 +2498,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>COMPACTION_KV_MAX</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.275">COMPACTION_KV_MAX</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.277">COMPACTION_KV_MAX</a></pre>
 <div class="block">Parameter name for the maximum batch of KVs to be used in flushes and compactions</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.COMPACTION_KV_MAX">Constant Field Values</a></dd></dl>
 </li>
@@ -2509,7 +2509,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>COMPACTION_KV_MAX_DEFAULT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.276">COMPACTION_KV_MAX_DEFAULT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.278">COMPACTION_KV_MAX_DEFAULT</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.COMPACTION_KV_MAX_DEFAULT">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2519,7 +2519,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_DIR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.279">HBASE_DIR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.281">HBASE_DIR</a></pre>
 <div class="block">Parameter name for HBase instance root directory</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_DIR">Constant Field Values</a></dd></dl>
 </li>
@@ -2530,7 +2530,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_CLIENT_IPC_POOL_TYPE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.282">HBASE_CLIENT_IPC_POOL_TYPE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.284">HBASE_CLIENT_IPC_POOL_TYPE</a></pre>
 <div class="block">Parameter name for HBase client IPC pool type</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_IPC_POOL_TYPE">Constant Field Values</a></dd></dl>
 </li>
@@ -2541,7 +2541,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_CLIENT_IPC_POOL_SIZE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.285">HBASE_CLIENT_IPC_POOL_SIZE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.287">HBASE_CLIENT_IPC_POOL_SIZE</a></pre>
 <div class="block">Parameter name for HBase client IPC pool size</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_IPC_POOL_SIZE">Constant Field Values</a></dd></dl>
 </li>
@@ -2552,7 +2552,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_CLIENT_OPERATION_TIMEOUT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.288">HBASE_CLIENT_OPERATION_TIMEOUT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.290">HBASE_CLIENT_OPERATION_TIMEOUT</a></pre>
 <div class="block">Parameter name for HBase client operation timeout, which overrides RPC timeout</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_OPERATION_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
@@ -2563,7 +2563,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_CLIENT_META_OPERATION_TIMEOUT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.291">HBASE_CLIENT_META_OPERATION_TIMEOUT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.293">HBASE_CLIENT_META_OPERATION_TIMEOUT</a></pre>
 <div class="block">Parameter name for HBase client operation timeout, which overrides RPC timeout</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_META_OPERATION_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
@@ -2574,7 +2574,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.295">DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.297">DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT</a></pre>
 <div class="block">Default HBase client operation timeout, which is tantamount to a blocking call</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
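
 A hedged sketch of how a client might override the operation timeout; the 60 000 ms value is purely illustrative and the class name is a placeholder:

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.hbase.HBaseConfiguration;
     import org.apache.hadoop.hbase.HConstants;

     public class ClientTimeoutSketch {
       public static Configuration withOperationTimeout() {
         Configuration conf = HBaseConfiguration.create();
         // Illustrative 60 s cap on whole client operations (retries included);
         // the shipped default is effectively a blocking call.
         conf.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 60_000);
         return conf;
       }
     }
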
@@ -2585,7 +2585,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_LOGDIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.298">HREGION_LOGDIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.300">HREGION_LOGDIR_NAME</a></pre>
 <div class="block">Used to construct the name of the log directory for a region server</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_LOGDIR_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2596,7 +2596,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>SPLIT_LOGDIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.301">SPLIT_LOGDIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.303">SPLIT_LOGDIR_NAME</a></pre>
 <div class="block">Used to construct the name of the splitlog directory for a region server</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.SPLIT_LOGDIR_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2607,7 +2607,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_OLDLOGDIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.304">HREGION_OLDLOGDIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.306">HREGION_OLDLOGDIR_NAME</a></pre>
 <div class="block">Like the previous, but for old logs that are about to be deleted</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_OLDLOGDIR_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2618,7 +2618,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CORRUPT_DIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.306">CORRUPT_DIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.308">CORRUPT_DIR_NAME</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CORRUPT_DIR_NAME">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2628,7 +2628,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBCK_SIDELINEDIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.309">HBCK_SIDELINEDIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.311">HBCK_SIDELINEDIR_NAME</a></pre>
 <div class="block">Used by HBCK to sideline backup data</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBCK_SIDELINEDIR_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2639,7 +2639,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MIGRATION_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.312">MIGRATION_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.314">MIGRATION_NAME</a></pre>
 <div class="block">Any artifacts left from migration can be moved here</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MIGRATION_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2650,7 +2650,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>LIB_DIR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.319">LIB_DIR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.321">LIB_DIR</a></pre>
 <div class="block">The directory from which co-processor/custom filter jars can be loaded
  dynamically by the region servers. This value can be overridden by the
  hbase.dynamic.jars.dir config.</div>
@@ -2663,7 +2663,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_COMPACTIONDIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.322">HREGION_COMPACTIONDIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.324">HREGION_COMPACTIONDIR_NAME</a></pre>
 <div class="block">Used to construct the name of the compaction directory during compaction</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_COMPACTIONDIR_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2674,7 +2674,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_MAX_FILESIZE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.325">HREGION_MAX_FILESIZE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.327">HREGION_MAX_FILESIZE</a></pre>
 <div class="block">Conf key for the max file size after which we split the region</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_MAX_FILESIZE">Constant Field Values</a></dd></dl>
 </li>
@@ -2685,7 +2685,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MAX_FILE_SIZE</h4>
-<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.329">DEFAULT_MAX_FILE_SIZE</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.331">DEFAULT_MAX_FILE_SIZE</a></pre>
 <div class="block">Default maximum file size</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_MAX_FILE_SIZE">Constant Field Values</a></dd></dl>
 </li>
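
 A brief sketch of reading the region split threshold with its default, assuming a Configuration is already available (the class name is a placeholder):

     import org.apache.hadoop.conf.Configuration;
     import org.apache.hadoop.hbase.HConstants;

     public class MaxFileSizeSketch {
       // Region split threshold in bytes: the configured value, or the shipped default.
       public static long maxFileSizeBytes(Configuration conf) {
         return conf.getLong(HConstants.HREGION_MAX_FILESIZE, HConstants.DEFAULT_MAX_FILE_SIZE);
       }
     }
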
@@ -2696,7 +2696,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE_MAX_ROWSIZE_KEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.334">TABLE_MAX_ROWSIZE_KEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.336">TABLE_MAX_ROWSIZE_KEY</a></pre>
 <div class="block">Max size of single row for Get's or Scan's without in-row scanning flag set.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.TABLE_MAX_ROWSIZE_KEY">Constant Field Values</a></dd></dl>
 </li>
@@ -2707,7 +2707,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE_MAX_ROWSIZE_DEFAULT</h4>
-<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.339">TABLE_MAX_ROWSIZE_DEFAULT</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.341">TABLE_MAX_ROWSIZE_DEFAULT</a></pre>
 <div class="block">Default max row size (1 Gb).</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.TABLE_MAX_ROWSIZE_DEFAULT">Constant Field Values</a></dd></dl>
 </li>
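
The two row-size constants above pair a configuration key with its default. A minimal sketch of reading the effective limit follows, again assuming the hbase-client and hadoop-common jars are on the classpath; the class name is illustrative.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;

    public class RowSizeLimitSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Largest row a Get or Scan may return when in-row (partial) scanning is not enabled.
        long maxRowSize = conf.getLong(HConstants.TABLE_MAX_ROWSIZE_KEY,
            HConstants.TABLE_MAX_ROWSIZE_DEFAULT);
        System.out.println(HConstants.TABLE_MAX_ROWSIZE_KEY + " = " + maxRowSize + " bytes");
      }
    }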
@@ -2718,7 +2718,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HSTORE_OPEN_AND_CLOSE_THREADS_MAX</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.345">HSTORE_OPEN_AND_CLOSE_THREADS_MAX</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.347">HSTORE_OPEN_AND_CLOSE_THREADS_MAX</a></pre>
 <div class="block">The max number of threads used for opening and closing stores or store
  files in parallel</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HSTORE_OPEN_AND_CLOSE_THREADS_MAX">Constant Field Values</a></dd></dl>
@@ -2730,7 +2730,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.352">DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.354">DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX</a></pre>
 <div class="block">The default number for the max number of threads used for opening and
  closing stores or store files in parallel</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX">Constant Field Values</a></dd></dl>
@@ -2742,7 +2742,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_MEMSTORE_BLOCK_MULTIPLIER</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.359">HREGION_MEMSTORE_BLOCK_MULTIPLIER</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.361">HREGION_MEMSTORE_BLOCK_MULTIPLIER</a></pre>
 <div class="block">Block updates if memstore has hbase.hregion.memstore.block.multiplier
 times hbase.hregion.memstore.flush.size bytes.  Useful for preventing
 runaway memstore during spikes in update traffic.</div>
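
As the description says, the per-region blocking threshold is the product of the flush size and the multiplier. A minimal sketch of computing it is below, assuming hbase-client and hadoop-common on the classpath; the 128 MB flush-size fallback and the class name are assumptions, not taken from this page.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;

    public class MemstoreBlockingSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Flush size; 128 MB is an assumed fallback if the key is unset.
        long flushSize = conf.getLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 128L * 1024 * 1024);
        int multiplier = conf.getInt(HConstants.HREGION_MEMSTORE_BLOCK_MULTIPLIER,
            HConstants.DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER);
        // Updates to a region block once its memstore reaches flushSize * multiplier bytes.
        System.out.println("Per-region blocking threshold: " + (flushSize * multiplier) + " bytes");
      }
    }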
@@ -2755,7 +2755,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.365">DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.367">DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER</a></pre>
 <div class="block">Default value for hbase.hregion.memstore.block.multiplier</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER">Constant Field Values</a></dd></dl>
 </li>
@@ -2766,7 +2766,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_MEMSTORE_FLUSH_SIZE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.368">HREGION_MEMSTORE_FLUSH_SIZE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.370">HREGION_MEMSTORE_FLUSH_SIZE</a></pre>
 <div class="block">Conf key for the memstore size at which we flush the memstore</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_MEMSTORE_FLUSH_SIZE">Constant Field Values</a></dd></dl>
 </li>
@@ -2777,7 +2777,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_EDITS_REPLAY_SKIP_ERRORS</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.371">HREGION_EDITS_REPLAY_SKIP_ERRORS</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.373">HREGION_EDITS_REPLAY_SKIP_ERRORS</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2787,7 +2787,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.374">DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.376">DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2797,7 +2797,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MAXIMUM_VALUE_LENGTH</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.378">MAXIMUM_VALUE_LENGTH</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.380">MAXIMUM_VALUE_LENGTH</a></pre>
 <div class="block">Maximum value length, enforced on KeyValue construction</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MAXIMUM_VALUE_LENGTH">Constant Field Values</a></dd></dl>
 </li>
@@ -2808,7 +2808,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLUSTER_ID_FILE_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.381">CLUSTER_ID_FILE_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.383">CLUSTER_ID_FILE_NAME</a></pre>
 <div class="block">name of the file for unique cluster ID</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLUSTER_ID_FILE_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2819,7 +2819,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLUSTER_ID_DEFAULT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.384">CLUSTER_ID_DEFAULT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.386">CLUSTER_ID_DEFAULT</a></pre>
 <div class="block">Default value for cluster ID</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLUSTER_ID_DEFAULT">Constant Field Values</a></dd></dl>
 </li>
@@ -2830,7 +2830,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>KEEP_SEQID_PERIOD</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.387">KEEP_SEQID_PERIOD</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.389">KEEP_SEQID_PERIOD</a></pre>
 <div class="block">Parameter name for # days to keep MVCC values during a major compaction</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.KEEP_SEQID_PERIOD">Constant Field Values</a></dd></dl>
 </li>
@@ -2841,7 +2841,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MIN_KEEP_SEQID_PERIOD</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.389">MIN_KEEP_SEQID_PERIOD</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.391">MIN_KEEP_SEQID_PERIOD</a></pre>
 <div class="block">At least to keep MVCC values in hfiles for 5 days</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MIN_KEEP_SEQID_PERIOD">Constant Field Values</a></dd></dl>
 </li>
@@ -2853,7 +2853,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <li class="blockList">
 <h4>META_TABLE_NAME</h4>
 <pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.417">META_TABLE_NAME</a></pre>
+public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.419">META_TABLE_NAME</a></pre>
 <div class="block"><span class="strong">Deprecated.</span>&nbsp;<i>For upgrades of 0.94 to 0.96</i></div>
 <div class="block">The hbase:meta table's name.</div>
 </li>
@@ -2864,7 +2864,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>BASE_NAMESPACE_DIR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.419">BASE_NAMESPACE_DIR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.421">BASE_NAMESPACE_DIR</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.BASE_NAMESPACE_DIR">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2874,7 +2874,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>META_ROW_DELIMITER</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.422">META_ROW_DELIMITER</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.424">META_ROW_DELIMITER</a></pre>
 <div class="block">delimiter used between portions of a region name</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.META_ROW_DELIMITER">Constant Field Values</a></dd></dl>
 </li>
@@ -2885,7 +2885,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>CATALOG_FAMILY_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.425">CATALOG_FAMILY_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.427">CATALOG_FAMILY_STR</a></pre>
 <div class="block">The catalog family as a string</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CATALOG_FAMILY_STR">Constant Field Values</a></dd></dl>
 </li>
@@ -2896,7 +2896,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>CATALOG_FAMILY</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.428">CATALOG_FAMILY</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.430">CATALOG_FAMILY</a></pre>
 <div class="block">The catalog family</div>
 </li>
 </ul>
@@ -2906,7 +2906,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>REGIONINFO_QUALIFIER_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.431">REGIONINFO_QUALIFIER_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.433">REGIONINFO_QUALIFIER_STR</a></pre>
 <div class="block">The RegionInfo qualifier as a string</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.REGIONINFO_QUALIFIER_STR">Constant Field Values</a></dd></dl>
 </li>
@@ -2917,7 +2917,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>REGIONINFO_QUALIFIER</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.434">REGIONINFO_QUALIFIER</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.436">REGIONINFO_QUALIFIER</a></pre>
 <div class="block">The regioninfo column qualifier</div>
 </li>
 </ul>
@@ -2927,7 +2927,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>SERVER_QUALIFIER_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.437">SERVER_QUALIFIER_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.439">SERVER_QUALIFIER_STR</a></pre>
 <div class="block">The server column qualifier</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.SERVER_QUALIFIER_STR">Constant Field Values</a></dd></dl>
 </li>
@@ -2938,7 +2938,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>SERVER_QUALIFIER</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.439">SERVER_QUALIFIER</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.441">SERVER_QUALIFIER</a></pre>
 <div class="block">The server column qualifier</div>
 </li>
 </ul>
@@ -2948,7 +2948,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>STARTCODE_QUALIFIER_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.442">STARTCODE_QUALIFIER_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.444">STARTCODE_QUALIFIER_STR</a></pre>
 <div class="block">The startcode column qualifier</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.STARTCODE_QUALIFIER_STR">Constant Field Values</a></dd></dl>
 </li>
@@ -2959,7 +2959,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>STARTCODE_QUALIFIER</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.444">STARTCODE_QUALIFIER</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.446">STARTCODE_QUALIFIER</a></pre>
 <div class="block">The startcode column qualifier</div>
 </li>
 </ul>
@@ -2969,7 +2969,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>SEQNUM_QUALIFIER_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.447">SEQNUM_QUALIFIER_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.449">SEQNUM_QUALIFIER_STR</a></pre>
 <div class="block">The open seqnum column qualifier</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.SEQNUM_QUALIFIER_STR">Constant Field Values</a></dd></dl>
 </li>
@@ -2980,7 +2980,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class=

<TRUNCATED>

[45/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/checkstyle-aggregate.html
----------------------------------------------------------------------
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 8a49fe9..e883f1b 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20160303" />
+    <meta name="Date-Revision-yyyymmdd" content="20160304" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Checkstyle Results</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -283,7 +283,7 @@
 <td>1693</td>
 <td>0</td>
 <td>0</td>
-<td>12706</td></tr></table></div>
+<td>12697</td></tr></table></div>
 <div class="section">
 <h2><a name="Files"></a>Files</h2>
 <table border="0" class="table table-striped">
@@ -1083,4906 +1083,4901 @@
 <td>0</td>
 <td>2</td></tr>
 <tr class="b">
-<td><a href="#org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeSeeker.java">org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java</a></td>
-<td>0</td>
-<td>0</td>
-<td>1</td></tr>
-<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory.java">org/apache/hadoop/hbase/codec/prefixtree/decode/DecoderFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArrayReversibleScanner.java">org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayReversibleScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArrayScanner.java">org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher.java">org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.decode.row.RowNodeReader.java">org/apache/hadoop/hbase/codec/prefixtree/decode/row/RowNodeReader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.encode.EncoderFactory.java">org/apache/hadoop/hbase/codec/prefixtree/encode/EncoderFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder.java">org/apache/hadoop/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.encode.column.ColumnSectionWriter.java">org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder.java">org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.encode.row.RowSectionWriter.java">org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowSectionWriter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer.java">org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode.java">org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher.java">org/apache/hadoop/hbase/codec/prefixtree/scanner/CellSearcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.codec.prefixtree.scanner.ReversibleCellScanner.java">org/apache/hadoop/hbase/codec/prefixtree/scanner/ReversibleCellScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.conf.ConfigurationManager.java">org/apache/hadoop/hbase/conf/ConfigurationManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.constraint.BaseConstraint.java">org/apache/hadoop/hbase/constraint/BaseConstraint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.constraint.Constraint.java">org/apache/hadoop/hbase/constraint/Constraint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.constraint.ConstraintException.java">org/apache/hadoop/hbase/constraint/ConstraintException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.constraint.ConstraintProcessor.java">org/apache/hadoop/hbase/constraint/ConstraintProcessor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.constraint.Constraints.java">org/apache/hadoop/hbase/constraint/Constraints.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.constraint.package-info.java">org/apache/hadoop/hbase/constraint/package-info.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>61</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager.java">org/apache/hadoop/hbase/coordination/BaseCoordinatedStateManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination.java">org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination.java">org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coordination.ZKSplitLogManagerCoordination.java">org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coordination.ZkCoordinatedStateManager.java">org/apache/hadoop/hbase/coordination/ZkCoordinatedStateManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coordination.ZkSplitLogWorkerCoordination.java">org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.AggregateImplementation.java">org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.BaseMasterAndRegionObserver.java">org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.BaseMasterObserver.java">org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.BaseRegionObserver.java">org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.BaseRegionServerObserver.java">org/apache/hadoop/hbase/coprocessor/BaseRegionServerObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.BaseRowProcessorEndpoint.java">org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.BaseWALObserver.java">org/apache/hadoop/hbase/coprocessor/BaseWALObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.BulkLoadObserver.java">org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.ColumnInterpreter.java">org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>21</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.CoprocessorException.java">org/apache/hadoop/hbase/coprocessor/CoprocessorException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.CoprocessorHost.java">org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.CoprocessorService.java">org/apache/hadoop/hbase/coprocessor/CoprocessorService.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.EndpointObserver.java">org/apache/hadoop/hbase/coprocessor/EndpointObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment.java">org/apache/hadoop/hbase/coprocessor/MasterCoprocessorEnvironment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.MasterObserver.java">org/apache/hadoop/hbase/coprocessor/MasterObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>45</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint.java">org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.ObserverContext.java">org/apache/hadoop/hbase/coprocessor/ObserverContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment.java">org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.RegionObserver.java">org/apache/hadoop/hbase/coprocessor/RegionObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>62</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.RegionServerObserver.java">org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>27</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.SingletonCoprocessorService.java">org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.WALCoprocessorEnvironment.java">org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.WALObserver.java">org/apache/hadoop/hbase/coprocessor/WALObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.example.BulkDeleteEndpoint.java">org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.example.RowCountEndpoint.java">org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.example.ZooKeeperScanPolicyObserver.java">org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>22</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.coprocessor.package-info.java">org/apache/hadoop/hbase/coprocessor/package-info.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.errorhandling.ForeignException.java">org/apache/hadoop/hbase/errorhandling/ForeignException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher.java">org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.errorhandling.TimeoutExceptionInjector.java">org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.exceptions.FailedSanityCheckException.java">org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.exceptions.LockTimeoutException.java">org/apache/hadoop/hbase/exceptions/LockTimeoutException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.exceptions.PreemptiveFastFailException.java">org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>28</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.executor.EventHandler.java">org/apache/hadoop/hbase/executor/EventHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.executor.EventType.java">org/apache/hadoop/hbase/executor/EventType.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.executor.ExecutorService.java">org/apache/hadoop/hbase/executor/ExecutorService.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.executor.ExecutorType.java">org/apache/hadoop/hbase/executor/ExecutorType.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.BinaryComparator.java">org/apache/hadoop/hbase/filter/BinaryComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.BinaryPrefixComparator.java">org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.BitComparator.java">org/apache/hadoop/hbase/filter/BitComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.ByteArrayComparable.java">org/apache/hadoop/hbase/filter/ByteArrayComparable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.ColumnCountGetFilter.java">org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.ColumnPaginationFilter.java">org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.ColumnPrefixFilter.java">org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.ColumnRangeFilter.java">org/apache/hadoop/hbase/filter/ColumnRangeFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>21</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.CompareFilter.java">org/apache/hadoop/hbase/filter/CompareFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>21</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.DependentColumnFilter.java">org/apache/hadoop/hbase/filter/DependentColumnFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FamilyFilter.java">org/apache/hadoop/hbase/filter/FamilyFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.Filter.java">org/apache/hadoop/hbase/filter/Filter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterBase.java">org/apache/hadoop/hbase/filter/FilterBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterList.java">org/apache/hadoop/hbase/filter/FilterList.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>37</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FilterWrapper.java">org/apache/hadoop/hbase/filter/FilterWrapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter.java">org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.FirstKeyValueMatchingQualifiersFilter.java">org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.FuzzyRowFilter.java">org/apache/hadoop/hbase/filter/FuzzyRowFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>17</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.InclusiveStopFilter.java">org/apache/hadoop/hbase/filter/InclusiveStopFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.KeyOnlyFilter.java">org/apache/hadoop/hbase/filter/KeyOnlyFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.LongComparator.java">org/apache/hadoop/hbase/filter/LongComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>39</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.MultiRowRangeFilter.java">org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter.java">org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.NullComparator.java">org/apache/hadoop/hbase/filter/NullComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.PageFilter.java">org/apache/hadoop/hbase/filter/PageFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.ParseConstants.java">org/apache/hadoop/hbase/filter/ParseConstants.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.ParseFilter.java">org/apache/hadoop/hbase/filter/ParseFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>36</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.PrefixFilter.java">org/apache/hadoop/hbase/filter/PrefixFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.QualifierFilter.java">org/apache/hadoop/hbase/filter/QualifierFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.RandomRowFilter.java">org/apache/hadoop/hbase/filter/RandomRowFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.RegexStringComparator.java">org/apache/hadoop/hbase/filter/RegexStringComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.RowFilter.java">org/apache/hadoop/hbase/filter/RowFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter.java">org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>14</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.SingleColumnValueFilter.java">org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>29</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.SkipFilter.java">org/apache/hadoop/hbase/filter/SkipFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.SubstringComparator.java">org/apache/hadoop/hbase/filter/SubstringComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.TimestampsFilter.java">org/apache/hadoop/hbase/filter/TimestampsFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.filter.ValueFilter.java">org/apache/hadoop/hbase/filter/ValueFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.filter.WhileMatchFilter.java">org/apache/hadoop/hbase/filter/WhileMatchFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.fs.HFileSystem.java">org/apache/hadoop/hbase/fs/HFileSystem.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>32</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.ClickjackingPreventionFilter.java">org/apache/hadoop/hbase/http/ClickjackingPreventionFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>14</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.HtmlQuoting.java">org/apache/hadoop/hbase/http/HtmlQuoting.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>17</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.HttpConfig.java">org/apache/hadoop/hbase/http/HttpConfig.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.HttpRequestLog.java">org/apache/hadoop/hbase/http/HttpRequestLog.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.HttpRequestLogAppender.java">org/apache/hadoop/hbase/http/HttpRequestLogAppender.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.HttpServer.java">org/apache/hadoop/hbase/http/HttpServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>46</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.InfoServer.java">org/apache/hadoop/hbase/http/InfoServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.NoCacheFilter.java">org/apache/hadoop/hbase/http/NoCacheFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.ServerConfigurationKeys.java">org/apache/hadoop/hbase/http/ServerConfigurationKeys.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.SslSocketConnectorSecure.java">org/apache/hadoop/hbase/http/SslSocketConnectorSecure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.conf.ConfServlet.java">org/apache/hadoop/hbase/http/conf/ConfServlet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.jmx.JMXJsonServlet.java">org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.http.lib.StaticUserWebFilter.java">org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.http.log.LogLevel.java">org/apache/hadoop/hbase/http/log/LogLevel.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.BoundedByteBufferPool.java">org/apache/hadoop/hbase/io/BoundedByteBufferPool.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.ByteBuffInputStream.java">org/apache/hadoop/hbase/io/ByteBuffInputStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.ByteBufferOutputStream.java">org/apache/hadoop/hbase/io/ByteBufferOutputStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.CellOutputStream.java">org/apache/hadoop/hbase/io/CellOutputStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.FSDataInputStreamWrapper.java">org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.FileLink.java">org/apache/hadoop/hbase/io/FileLink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.HFileLink.java">org/apache/hadoop/hbase/io/HFileLink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.HalfStoreFileReader.java">org/apache/hadoop/hbase/io/HalfStoreFileReader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.HeapSize.java">org/apache/hadoop/hbase/io/HeapSize.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.ImmutableBytesWritable.java">org/apache/hadoop/hbase/io/ImmutableBytesWritable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.LimitInputStream.java">org/apache/hadoop/hbase/io/LimitInputStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.Reference.java">org/apache/hadoop/hbase/io/Reference.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>19</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.TagCompressionContext.java">org/apache/hadoop/hbase/io/TagCompressionContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.TimeRange.java">org/apache/hadoop/hbase/io/TimeRange.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.WALLink.java">org/apache/hadoop/hbase/io/WALLink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.compress.Compression.java">org/apache/hadoop/hbase/io/compress/Compression.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.Cipher.java">org/apache/hadoop/hbase/io/crypto/Cipher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.Context.java">org/apache/hadoop/hbase/io/crypto/Context.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.Decryptor.java">org/apache/hadoop/hbase/io/crypto/Decryptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.Encryption.java">org/apache/hadoop/hbase/io/crypto/Encryption.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>54</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.Encryptor.java">org/apache/hadoop/hbase/io/crypto/Encryptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.KeyProvider.java">org/apache/hadoop/hbase/io/crypto/KeyProvider.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.aes.AES.java">org/apache/hadoop/hbase/io/crypto/aes/AES.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.aes.AESDecryptor.java">org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.crypto.aes.AESEncryptor.java">org/apache/hadoop/hbase/io/crypto/aes/AESEncryptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.BufferedDataBlockEncoder.java">org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>7</td></tr>
-<tr class="a">
+<td>6</td></tr>
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.java">org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>15</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.DataBlockEncoding.java">org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.EncodedDataBlock.java">org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext.java">org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext.java">org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext.java">org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream.java">org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.BlockCache.java">org/apache/hadoop/hbase/io/hfile/BlockCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.java">org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>22</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.BlockCachesIterator.java">org/apache/hadoop/hbase/io/hfile/BlockCachesIterator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.BlockType.java">org/apache/hadoop/hbase/io/hfile/BlockType.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheConfig.java">org/apache/hadoop/hbase/io/hfile/CacheConfig.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>14</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheStats.java">org/apache/hadoop/hbase/io/hfile/CacheStats.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheableDeserializer.java">org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager.java">org/apache/hadoop/hbase/io/hfile/CacheableDeserializerIdManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.ChecksumUtil.java">org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CombinedBlockCache.java">org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CompoundBloomFilter.java">org/apache/hadoop/hbase/io/hfile/CompoundBloomFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterBase.java">org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterWriter.java">org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CorruptHFileException.java">org/apache/hadoop/hbase/io/hfile/CorruptHFileException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.FixedFileTrailer.java">org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFile.java">org/apache/hadoop/hbase/io/hfile/HFile.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>48</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileBlock.java">org/apache/hadoop/hbase/io/hfile/HFileBlock.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>52</td></tr>
-<tr class="a">
+<td>51</td></tr>
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.java">org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>41</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileContext.java">org/apache/hadoop/hbase/io/hfile/HFileContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder.java">org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl.java">org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter.java">org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>19</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileReaderImpl.java">org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>53</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileScanner.java">org/apache/hadoop/hbase/io/hfile/HFileScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>22</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.HFileWriterImpl.java">org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>22</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.LruBlockCache.java">org/apache/hadoop/hbase/io/hfile/LruBlockCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>18</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.LruCachedBlock.java">org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.LruCachedBlockQueue.java">org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache.java">org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder.java">org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.PrefetchExecutor.java">org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator.java">org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>34</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.java">org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>28</td></tr>
-<tr class="a">
+<td>27</td></tr>
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.ByteBufferIOEngine.java">org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.CachedEntryQueue.java">org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.FileIOEngine.java">org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.FileMmapEngine.java">org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.IOEngine.java">org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap.java">org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.util.HeapMemorySizeUtil.java">org/apache/hadoop/hbase/io/util/HeapMemorySizeUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.util.LRUDictionary.java">org/apache/hadoop/hbase/io/util/LRUDictionary.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.util.StreamUtils.java">org/apache/hadoop/hbase/io/util/StreamUtils.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.AbstractRpcClient.java">org/apache/hadoop/hbase/ipc/AbstractRpcClient.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.AsyncCall.java">org/apache/hadoop/hbase/ipc/AsyncCall.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.AsyncRpcChannel.java">org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.AsyncRpcClient.java">org/apache/hadoop/hbase/ipc/AsyncRpcClient.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>34</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.AsyncServerResponseHandler.java">org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.BlockingRpcCallback.java">org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.BufferChain.java">org/apache/hadoop/hbase/ipc/BufferChain.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.Call.java">org/apache/hadoop/hbase/ipc/Call.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.CallRunner.java">org/apache/hadoop/hbase/ipc/CallRunner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.CallTimeoutException.java">org/apache/hadoop/hbase/ipc/CallTimeoutException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.ConnectionId.java">org/apache/hadoop/hbase/ipc/ConnectionId.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.java">org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.FailedServers.java">org/apache/hadoop/hbase/ipc/FailedServers.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.FifoRpcScheduler.java">org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.IPCUtil.java">org/apache/hadoop/hbase/ipc/IPCUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>48</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.MasterCoprocessorRpcChannel.java">org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.MetricsHBaseServer.java">org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.MetricsHBaseServerSourceImpl.java">org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.PriorityFunction.java">org/apache/hadoop/hbase/ipc/PriorityFunction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.RWQueueRpcExecutor.java">org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.java">org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.RegionServerCoprocessorRpcChannel.java">org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.RpcCallContext.java">org/apache/hadoop/hbase/ipc/RpcCallContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.RpcClient.java">org/apache/hadoop/hbase/ipc/RpcClient.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.RpcClientFactory.java">org/apache/hadoop/hbase/ipc/RpcClientFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.RpcClientImpl.java">org/apache/hadoop/hbase/ipc/RpcClientImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>26</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.RpcExecutor.java">org/apache/hadoop/hbase/ipc/RpcExecutor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.RpcScheduler.java">org/apache/hadoop/hbase/ipc/RpcScheduler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.RpcSchedulerContext.java">org/apache/hadoop/hbase/ipc/RpcSchedulerContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.RpcServer.java">org/apache/hadoop/hbase/ipc/RpcServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>84</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.RpcServerInterface.java">org/apache/hadoop/hbase/ipc/RpcServerInterface.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.ServerRpcController.java">org/apache/hadoop/hbase/ipc/ServerRpcController.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.ipc.SimpleRpcScheduler.java">org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ipc.TimeLimitedRpcController.java">org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapred.Driver.java">org/apache/hadoop/hbase/mapred/Driver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapred.GroupingTableMap.java">org/apache/hadoop/hbase/mapred/GroupingTableMap.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapred.HRegionPartitioner.java">org/apache/hadoop/hbase/mapred/HRegionPartitioner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapred.IdentityTableMap.java">org/apache/hadoop/hbase/mapred/IdentityTableMap.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapred.IdentityTableReduce.java">org/apache/hadoop/hbase/mapred/IdentityTableReduce.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapred.MultiTableSnapshotInputFormat.java">org/apache/hadoop/hbase/mapred/MultiTableSnapshotInputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapred.RowCounter.java">org/apache/hadoop/hbase/mapred/RowCounter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapred.TableInputFormat.java">org/apache/hadoop/hbase/mapred/TableInputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapred.TableInputFormatBase.java">org/apache/hadoop/hbase/mapred/TableInputFormatBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapred.TableMap.java">org/apache/hadoop/hbase/mapred/TableMap.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapred.TableMapReduceUtil.java">org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapred.TableOutputFormat.java">org/apache/hadoop/hbase/mapred/TableOutputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapred.TableRecordReader.java">org/apache/hadoop/hbase/mapred/TableRecordReader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapred.TableRecordReaderImpl.java">org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapred.TableReduce.java">org/apache/hadoop/hbase/mapred/TableReduce.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapred.TableSnapshotInputFormat.java">org/apache/hadoop/hbase/mapred/TableSnapshotInputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapred.TableSplit.java">org/apache/hadoop/hbase/mapred/TableSplit.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>18</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.CellCounter.java">org/apache/hadoop/hbase/mapreduce/CellCounter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.CellCreator.java">org/apache/hadoop/hbase/mapreduce/CellCreator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.CopyTable.java">org/apache/hadoop/hbase/mapreduce/CopyTable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>14</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.DefaultVisibilityExpressionResolver.java">org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.Driver.java">org/apache/hadoop/hbase/mapreduce/Driver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.Export.java">org/apache/hadoop/hbase/mapreduce/Export.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.GroupingTableMapper.java">org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.java">org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.HRegionPartitioner.java">org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.HashTable.java">org/apache/hadoop/hbase/mapreduce/HashTable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.IdentityTableMapper.java">org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.IdentityTableReducer.java">org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.Import.java">org/apache/hadoop/hbase/mapreduce/Import.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.ImportTsv.java">org/apache/hadoop/hbase/mapreduce/ImportTsv.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>20</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.IndexBuilder.java">org/apache/hadoop/hbase/mapreduce/IndexBuilder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.JarFinder.java">org/apache/hadoop/hbase/mapreduce/JarFinder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.JobUtil.java">org/apache/hadoop/hbase/mapreduce/JobUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.KeyValueSortReducer.java">org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.java">org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>18</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.MultiTableInputFormat.java">org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.MultiTableInputFormatBase.java">org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.MultiTableOutputFormat.java">org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.MultiTableSnapshotInputFormat.java">org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.MultiTableSnapshotInputFormatImpl.java">org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.MultithreadedTableMapper.java">org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.PutCombiner.java">org/apache/hadoop/hbase/mapreduce/PutCombiner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.PutSortReducer.java">org/apache/hadoop/hbase/mapreduce/PutSortReducer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.ResultSerialization.java">org/apache/hadoop/hbase/mapreduce/ResultSerialization.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.RowCounter.java">org/apache/hadoop/hbase/mapreduce/RowCounter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.SampleUploader.java">org/apache/hadoop/hbase/mapreduce/SampleUploader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.SimpleTotalOrderPartitioner.java">org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.SyncTable.java">org/apache/hadoop/hbase/mapreduce/SyncTable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableInputFormat.java">org/apache/hadoop/hbase/mapreduce/TableInputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.java">org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>15</td></tr>
-<tr class="a">
+<td>14</td></tr>
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.java">org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>54</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableMapper.java">org/apache/hadoop/hbase/mapreduce/TableMapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableOutputCommitter.java">org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableOutputFormat.java">org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableRecordReader.java">org/apache/hadoop/hbase/mapreduce/TableRecordReader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl.java">org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableReducer.java">org/apache/hadoop/hbase/mapreduce/TableReducer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormat.java">org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatImpl.java">org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TableSplit.java">org/apache/hadoop/hbase/mapreduce/TableSplit.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TextSortReducer.java">org/apache/hadoop/hbase/mapreduce/TextSortReducer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TsvImporterMapper.java">org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper.java">org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.VisibilityExpressionResolver.java">org/apache/hadoop/hbase/mapreduce/VisibilityExpressionResolver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.WALInputFormat.java">org/apache/hadoop/hbase/mapreduce/WALInputFormat.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.WALPlayer.java">org/apache/hadoop/hbase/mapreduce/WALPlayer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication.java">org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.ActiveMasterManager.java">org/apache/hadoop/hbase/master/ActiveMasterManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.AssignCallable.java">org/apache/hadoop/hbase/master/AssignCallable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.AssignmentListener.java">org/apache/hadoop/hbase/master/AssignmentListener.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.AssignmentManager.java">org/apache/hadoop/hbase/master/AssignmentManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>180</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.AssignmentVerificationReport.java">org/apache/hadoop/hbase/master/AssignmentVerificationReport.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.BulkAssigner.java">org/apache/hadoop/hbase/master/BulkAssigner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.BulkReOpen.java">org/apache/hadoop/hbase/master/BulkReOpen.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.CatalogJanitor.java">org/apache/hadoop/hbase/master/CatalogJanitor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>27</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.ClusterSchemaServiceImpl.java">org/apache/hadoop/hbase/master/ClusterSchemaServiceImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.ClusterStatusPublisher.java">org/apache/hadoop/hbase/master/ClusterStatusPublisher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.DeadServer.java">org/apache/hadoop/hbase/master/DeadServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.ExpiredMobFileCleanerChore.java">org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.GeneralBulkAssigner.java">org/apache/hadoop/hbase/master/GeneralBulkAssigner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.HMaster.java">org/apache/hadoop/hbase/master/HMaster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>56</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.HMasterCommandLine.java">org/apache/hadoop/hbase/master/HMasterCommandLine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.LoadBalancer.java">org/apache/hadoop/hbase/master/LoadBalancer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.MasterCoprocessorHost.java">org/apache/hadoop/hbase/master/MasterCoprocessorHost.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.MasterDumpServlet.java">org/apache/hadoop/hbase/master/MasterDumpServlet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.MasterFileSystem.java">org/apache/hadoop/hbase/master/MasterFileSystem.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>22</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.MasterRpcServices.java">org/apache/hadoop/hbase/master/MasterRpcServices.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.MasterServices.java">org/apache/hadoop/hbase/master/MasterServices.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>40</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.MasterStatusServlet.java">org/apache/hadoop/hbase/master/MasterStatusServlet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.MetricsAssignmentManager.java">org/apache/hadoop/hbase/master/MetricsAssignmentManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.MetricsAssignmentManagerSourceImpl.java">org/apache/hadoop/hbase/master/MetricsAssignmentManagerSourceImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.MetricsMaster.java">org/apache/hadoop/hbase/master/MetricsMaster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.MetricsMasterProcSource.java">org/apache/hadoop/hbase/master/MetricsMasterProcSource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.MetricsMasterSourceImpl.java">org/apache/hadoop/hbase/master/MetricsMasterSourceImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.MetricsMasterWrapperImpl.java">org/apache/hadoop/hbase/master/MetricsMasterWrapperImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.RackManager.java">org/apache/hadoop/hbase/master/RackManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.RegionPlacementMaintainer.java">org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>235</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.RegionPlan.java">org/apache/hadoop/hbase/master/RegionPlan.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.RegionState.java">org/apache/hadoop/hbase/master/RegionState.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>96</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.RegionStateStore.java">org/apache/hadoop/hbase/master/RegionStateStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.RegionStates.java">org/apache/hadoop/hbase/master/RegionStates.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>21</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.ServerListener.java">org/apache/hadoop/hbase/master/ServerListener.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.ServerManager.java">org/apache/hadoop/hbase/master/ServerManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>34</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.SnapshotOfRegionAssignmentFromMeta.java">org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.SnapshotSentinel.java">org/apache/hadoop/hbase/master/SnapshotSentinel.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.SplitLogManager.java">org/apache/hadoop/hbase/master/SplitLogManager.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>24</td></tr>
-<tr class="a">
+<td>23</td></tr>
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.TableLockManager.java">org/apache/hadoop/hbase/master/TableLockManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.TableNamespaceManager.java">org/apache/hadoop/hbase/master/TableNamespaceManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.TableStateManager.java">org/apache/hadoop/hbase/master/TableStateManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.UnAssignCallable.java">org/apache/hadoop/hbase/master/UnAssignCallable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.java">org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>82</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.ClusterLoadState.java">org/apache/hadoop/hbase/master/balancer/ClusterLoadState.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.FavoredNodeAssignmentHelper.java">org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>35</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.FavoredNodeLoadBalancer.java">org/apache/hadoop/hbase/master/balancer/FavoredNodeLoadBalancer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.FavoredNodesPlan.java">org/apache/hadoop/hbase/master/balancer/FavoredNodesPlan.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory.java">org/apache/hadoop/hbase/master/balancer/LoadBalancerFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.MetricsBalancerSourceImpl.java">org/apache/hadoop/hbase/master/balancer/MetricsBalancerSourceImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.MetricsStochasticBalancerSourceImpl.java">org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancerSourceImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.RegionInfoComparator.java">org/apache/hadoop/hbase/master/balancer/RegionInfoComparator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.RegionLocationFinder.java">org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.ServerAndLoad.java">org/apache/hadoop/hbase/master/balancer/ServerAndLoad.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.SimpleLoadBalancer.java">org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>17</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer.java">org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>35</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.cleaner.BaseFileCleanerDelegate.java">org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.cleaner.BaseLogCleanerDelegate.java">org/apache/hadoop/hbase/master/cleaner/BaseLogCleanerDelegate.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.cleaner.CleanerChore.java">org/apache/hadoop/hbase/master/cleaner/CleanerChore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.cleaner.FileCleanerDelegate.java">org/apache/hadoop/hbase/master/cleaner/FileCleanerDelegate.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.cleaner.HFileCleaner.java">org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.cleaner.HFileLinkCleaner.java">org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.cleaner.LogCleaner.java">org/apache/hadoop/hbase/master/cleaner/LogCleaner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner.java">org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.cleaner.TimeToLiveLogCleaner.java">org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.handler.CreateTableHandler.java">org/apache/hadoop/hbase/master/handler/CreateTableHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.handler.DisableTableHandler.java">org/apache/hadoop/hbase/master/handler/DisableTableHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.handler.DispatchMergingRegionHandler.java">org/apache/hadoop/hbase/master/handler/DispatchMergingRegionHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.handler.EnableTableHandler.java">org/apache/hadoop/hbase/master/handler/EnableTableHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.handler.TableEventHandler.java">org/apache/hadoop/hbase/master/handler/TableEventHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.handler.TotesHRegionInfo.java">org/apache/hadoop/hbase/master/handler/TotesHRegionInfo.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.normalizer.MergeNormalizationPlan.java">org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.normalizer.RegionNormalizerChore.java">org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.normalizer.SimpleRegionNormalizer.java">org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.normalizer.SplitNormalizationPlan.java">org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.AddColumnFamilyProcedure.java">org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>63</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.CreateNamespaceProcedure.java">org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>57</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.java">org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.DeleteColumnFamilyProcedure.java">org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>74</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.DeleteNamespaceProcedure.java">org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>58</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.DeleteTableProcedure.java">org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.DisableTableProcedure.java">org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>66</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.EnableTableProcedure.java">org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>86</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.MasterDDLOperationHelper.java">org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.MasterProcedureScheduler.java">org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>22</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.ModifyColumnFamilyProcedure.java">org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>63</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.ModifyNamespaceProcedure.java">org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>36</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.ModifyTableProcedure.java">org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>94</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait.java">org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>26</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure.java">org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>108</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.procedure.TruncateTableProcedure.java">org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.snapshot.CloneSnapshotHandler.java">org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.snapshot.DisabledTableSnapshotHandler.java">org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.snapshot.EnabledTableSnapshotHandler.java">org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.snapshot.MasterSnapshotVerifier.java">org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.snapshot.RestoreSnapshotHandler.java">org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.snapshot.SnapshotFileCache.java">org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.snapshot.SnapshotHFileCleaner.java">org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.master.snapshot.SnapshotManager.java">org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.snapshot.TakeSnapshotHandler.java">org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.metrics.BaseSourceImpl.java">org/apache/hadoop/hbase/metrics/BaseSourceImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mob.CachedMobFile.java">org/apache/hadoop/hbase/mob/CachedMobFile.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mob.DefaultMobStoreCompactor.java">org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mob.DefaultMobStoreFlusher.java">org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mob.ExpiredMobFileCleaner.java">org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mob.MobFile.java">org/apache/hadoop/hbase/mob/MobFile.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mob.MobFileCache.java">org/apache/hadoop/hbase/mob/MobFileCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mob.MobUtils.java">org/apache/hadoop/hbase/mob/MobUtils.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mob.compactions.MobCompactor.java">org/apache/hadoop/hbase/mob/compactions/MobCompactor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mob.compactions.PartitionedMobCompactor.java">org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mob.mapreduce.MemStoreWrapper.java">org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mob.mapreduce.SweepJob.java">org/apache/hadoop/hbase/mob/mapreduce/SweepJob.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.mob.mapreduce.SweepReducer.java">org/apache/hadoop/hbase/mob/mapreduce/SweepReducer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.mob.mapreduce.Sweeper.java">org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.monitoring.LogMonitoring.java">org/apache/hadoop/hbase/monitoring/LogMonitoring.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.monitoring.MemoryBoundedLogMessageBuffer.java">org/apache/hadoop/hbase/monitoring/MemoryBoundedLogMessageBuffer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler.java">org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.monitoring.MonitoredRPCHandlerImpl.java">org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#

<TRUNCATED>

[37/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
index bb96d93..194d23e 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
@@ -103,7 +103,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.619">HFile.FileInfo</a>
+<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.620">HFile.FileInfo</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</pre>
 <div class="block">Metadata for this file. Conjured by the writer. Read in by the reader.</div>
@@ -355,7 +355,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>RESERVED_PREFIX</h4>
-<pre>static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.620">RESERVED_PREFIX</a></pre>
+<pre>static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.621">RESERVED_PREFIX</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFile.FileInfo.RESERVED_PREFIX">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -365,7 +365,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>RESERVED_PREFIX_BYTES</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.621">RESERVED_PREFIX_BYTES</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.622">RESERVED_PREFIX_BYTES</a></pre>
 </li>
 </ul>
 <a name="LASTKEY">
@@ -374,7 +374,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>LASTKEY</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.622">LASTKEY</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.623">LASTKEY</a></pre>
 </li>
 </ul>
 <a name="AVG_KEY_LEN">
@@ -383,7 +383,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>AVG_KEY_LEN</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.623">AVG_KEY_LEN</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.624">AVG_KEY_LEN</a></pre>
 </li>
 </ul>
 <a name="AVG_VALUE_LEN">
@@ -392,7 +392,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>AVG_VALUE_LEN</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.624">AVG_VALUE_LEN</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.625">AVG_VALUE_LEN</a></pre>
 </li>
 </ul>
 <a name="CREATE_TIME_TS">
@@ -401,7 +401,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>CREATE_TIME_TS</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.625">CREATE_TIME_TS</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.626">CREATE_TIME_TS</a></pre>
 </li>
 </ul>
 <a name="COMPARATOR">
@@ -410,7 +410,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>COMPARATOR</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.626">COMPARATOR</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.627">COMPARATOR</a></pre>
 </li>
 </ul>
 <a name="TAGS_COMPRESSED">
@@ -419,7 +419,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>TAGS_COMPRESSED</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.627">TAGS_COMPRESSED</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.628">TAGS_COMPRESSED</a></pre>
 </li>
 </ul>
 <a name="MAX_TAGS_LEN">
@@ -428,7 +428,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>MAX_TAGS_LEN</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.628">MAX_TAGS_LEN</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.629">MAX_TAGS_LEN</a></pre>
 </li>
 </ul>
 <a name="map">
@@ -437,7 +437,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockListLast">
 <li class="blockList">
 <h4>map</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.629">map</a></pre>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.630">map</a></pre>
 </li>
 </ul>
 </li>
@@ -454,7 +454,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFile.FileInfo</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.631">HFile.FileInfo</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.632">HFile.FileInfo</a>()</pre>
 </li>
 </ul>
 </li>
@@ -471,7 +471,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>append</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.FileInfo</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.646">append</a>(byte[]&nbsp;k,
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.FileInfo</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.647">append</a>(byte[]&nbsp;k,
                     byte[]&nbsp;v,
                     boolean&nbsp;checkPrefix)
                       throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -490,7 +490,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>clear</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.659">clear</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.660">clear</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#clear()" title="class or interface in java.util">clear</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -503,7 +503,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>comparator</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;? super byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.663">comparator</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;? super byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.664">comparator</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true#comparator()" title="class or interface in java.util">comparator</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -516,7 +516,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>containsKey</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.667">containsKey</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.668">containsKey</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#containsKey(java.lang.Object)" title="class or interface in java.util">containsKey</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -529,7 +529,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>containsValue</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.671">containsValue</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;value)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.672">containsValue</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;value)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#containsValue(java.lang.Object)" title="class or interface in java.util">containsValue</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -542,7 +542,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>entrySet</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;byte[],byte[]&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.675">entrySet</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;byte[],byte[]&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.676">entrySet</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#entrySet()" title="class or interface in java.util">entrySet</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -557,7 +557,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.679">equals</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.680">equals</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#equals(java.lang.Object)" title="class or interface in java.util">equals</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -572,7 +572,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>firstKey</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.683">firstKey</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.684">firstKey</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true#firstKey()" title="class or interface in java.util">firstKey</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -585,7 +585,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>get</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.687">get</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.688">get</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#get(java.lang.Object)" title="class or interface in java.util">get</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -598,7 +598,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>hashCode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.691">hashCode</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.692">hashCode</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#hashCode()" title="class or interface in java.util">hashCode</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -613,7 +613,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>headMap</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.695">headMap</a>(byte[]&nbsp;toKey)</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.696">headMap</a>(byte[]&nbsp;toKey)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true#headMap(K)" title="class or interface in java.util">headMap</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -626,7 +626,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>isEmpty</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.699">isEmpty</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.700">isEmpty</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#isEmpty()" title="class or interface in java.util">isEmpty</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -639,7 +639,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>keySet</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.703">keySet</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.704">keySet</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#keySet()" title="class or interface in java.util">keySet</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -654,7 +654,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>lastKey</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.707">lastKey</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.708">lastKey</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true#lastKey()" title="class or interface in java.util">lastKey</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -667,7 +667,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>put</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.711">put</a>(byte[]&nbsp;key,
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.712">put</a>(byte[]&nbsp;key,
          byte[]&nbsp;value)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
@@ -681,7 +681,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>putAll</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.715">putAll</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;? extends byte[],? extends byte[]&gt;&nbsp;m)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.716">putAll</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;? extends byte[],? extends byte[]&gt;&nbsp;m)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#putAll(java.util.Map)" title="class or interface in java.util">putAll</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -694,7 +694,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>remove</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.719">remove</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.720">remove</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#remove(java.lang.Object)" title="class or interface in java.util">remove</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -707,7 +707,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>size</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.723">size</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.724">size</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#size()" title="class or interface in java.util">size</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -720,7 +720,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>subMap</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.727">subMap</a>(byte[]&nbsp;fromKey,
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.728">subMap</a>(byte[]&nbsp;fromKey,
                               byte[]&nbsp;toKey)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
@@ -734,7 +734,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>tailMap</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.731">tailMap</a>(byte[]&nbsp;fromKey)</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.732">tailMap</a>(byte[]&nbsp;fromKey)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true#tailMap(K)" title="class or interface in java.util">tailMap</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -747,7 +747,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.735">values</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.736">values</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true#values()" title="class or interface in java.util">values</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -762,7 +762,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>write</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.746">write</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.747">write</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
      throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Write out this instance on the passed in <code>out</code> stream.
  We write it as a protobuf.</div>
@@ -777,7 +777,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>read</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.765">read</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;in)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.766">read</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;in)
     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Populate this instance with what we find on the passed in <code>in</code> stream.
  Can deserialize protobuf of old Writables format.</div>
@@ -792,7 +792,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockList">
 <li class="blockList">
 <h4>parseWritable</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.795">parseWritable</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;in)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.796">parseWritable</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;in)
              throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of
  a byte [].  The old map format had a byte before each entry that held a code which was short for the key or
@@ -807,7 +807,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/util/SortedMap
 <ul class="blockListLast">
 <li class="blockList">
 <h4>parsePB</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.814">parsePB</a>(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto&nbsp;fip)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.815">parsePB</a>(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto&nbsp;fip)</pre>
 <div class="block">Fill our map with content of the pb we read off disk</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>fip</code> - protobuf message to read</dd></dl>
 </li>

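For context on the HFile.FileInfo changes above: the class is a SortedMap<byte[],byte[]> of per-file metadata, and the append(k, v, checkPrefix) method shown in the diff refuses keys in the RESERVED_PREFIX namespace when checkPrefix is true. The following is a minimal illustrative sketch, not part of the commit; it uses only the public signatures visible in this diff, and the key and value strings are invented for the example.

    import java.io.IOException;
    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FileInfoSketch {
      public static void main(String[] args) throws IOException {
        // FileInfo behaves as a SortedMap<byte[], byte[]> of file metadata.
        HFile.FileInfo info = new HFile.FileInfo();

        // append(k, v, checkPrefix) throws IOException if checkPrefix is true
        // and the key falls inside the internal RESERVED_PREFIX namespace.
        info.append(Bytes.toBytes("example.key"), Bytes.toBytes("example.value"), true);

        // Standard SortedMap accessors are available as well.
        System.out.println("entries: " + info.size());
        System.out.println("empty:   " + info.isEmpty());
      }
    }

The write(DataOutputStream) and read(DataInputStream) methods in the diff persist and repopulate this map as protobuf (with fallback parsing of the old Writables format); they are package-private, so user code normally only sees the map through HFile.Writer and HFile.Reader.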
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
index c9af38d..be73317 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
@@ -99,7 +99,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.396">HFile.Reader</a>
+<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.397">HFile.Reader</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.CachingBlockReader</a></pre>
 <div class="block">An interface used by clients to open and iterate an <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>.</div>
 </li>
@@ -295,7 +295,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.402">getName</a>()</pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.403">getName</a>()</pre>
 <div class="block">Returns this reader's "name". Usually the last component of the path.
  Needs to be constant as the file is being moved to support caching on
  write.</div>
@@ -307,7 +307,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getComparator</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.404">getComparator</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.405">getComparator</a>()</pre>
 </li>
 </ul>
 <a name="getScanner(boolean, boolean, boolean)">
@@ -316,7 +316,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanner</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.406">getScanner</a>(boolean&nbsp;cacheBlocks,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.407">getScanner</a>(boolean&nbsp;cacheBlocks,
                       boolean&nbsp;pread,
                       boolean&nbsp;isCompaction)</pre>
 </li>
@@ -327,7 +327,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaBlock</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.408">getMetaBlock</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;metaBlockName,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.409">getMetaBlock</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;metaBlockName,
                       boolean&nbsp;cacheBlock)
                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -340,7 +340,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>loadFileInfo</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.410">loadFileInfo</a>()
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.411">loadFileInfo</a>()
                                 throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
@@ -352,7 +352,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastKey</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.412">getLastKey</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.413">getLastKey</a>()</pre>
 </li>
 </ul>
 <a name="midkey()">
@@ -361,7 +361,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>midkey</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.414">midkey</a>()
+<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.415">midkey</a>()
             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
@@ -373,7 +373,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>length</h4>
-<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.416">length</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.417">length</a>()</pre>
 </li>
 </ul>
 <a name="getEntries()">
@@ -382,7 +382,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getEntries</h4>
-<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.418">getEntries</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.419">getEntries</a>()</pre>
 </li>
 </ul>
 <a name="getFirstKey()">
@@ -391,7 +391,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getFirstKey</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.420">getFirstKey</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.421">getFirstKey</a>()</pre>
 </li>
 </ul>
 <a name="indexSize()">
@@ -400,7 +400,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>indexSize</h4>
-<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.422">indexSize</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.423">indexSize</a>()</pre>
 </li>
 </ul>
 <a name="getFirstRowKey()">
@@ -409,7 +409,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getFirstRowKey</h4>
-<pre>byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.424">getFirstRowKey</a>()</pre>
+<pre>byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.425">getFirstRowKey</a>()</pre>
 </li>
 </ul>
 <a name="getLastRowKey()">
@@ -418,7 +418,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastRowKey</h4>
-<pre>byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.426">getLastRowKey</a>()</pre>
+<pre>byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.427">getLastRowKey</a>()</pre>
 </li>
 </ul>
 <a name="getTrailer()">
@@ -427,7 +427,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTrailer</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html" title="class in org.apache.hadoop.hbase.io.hfile">FixedFileTrailer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.428">getTrailer</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html" title="class in org.apache.hadoop.hbase.io.hfile">FixedFileTrailer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.429">getTrailer</a>()</pre>
 </li>
 </ul>
 <a name="getDataBlockIndexReader()">
@@ -436,7 +436,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataBlockIndexReader</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexReader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.430">getDataBlockIndexReader</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexReader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.431">getDataBlockIndexReader</a>()</pre>
 </li>
 </ul>
 <a name="getScanner(boolean, boolean)">
@@ -445,7 +445,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanner</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.432">getScanner</a>(boolean&nbsp;cacheBlocks,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.433">getScanner</a>(boolean&nbsp;cacheBlocks,
                       boolean&nbsp;pread)</pre>
 </li>
 </ul>
@@ -455,7 +455,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompressionAlgorithm</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.434">getCompressionAlgorithm</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.435">getCompressionAlgorithm</a>()</pre>
 </li>
 </ul>
 <a name="getGeneralBloomFilterMetadata()">
@@ -464,7 +464,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getGeneralBloomFilterMetadata</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.441">getGeneralBloomFilterMetadata</a>()
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.442">getGeneralBloomFilterMetadata</a>()
                                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Retrieves general Bloom filter metadata as appropriate for each
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version.
@@ -479,7 +479,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getDeleteBloomFilterMetadata</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.448">getDeleteBloomFilterMetadata</a>()
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.449">getDeleteBloomFilterMetadata</a>()
                                        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Retrieves delete family Bloom filter metadata as appropriate for each
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>  version.
@@ -494,7 +494,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getPath</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.450">getPath</a>()</pre>
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.451">getPath</a>()</pre>
 </li>
 </ul>
 <a name="close(boolean)">
@@ -503,7 +503,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.453">close</a>(boolean&nbsp;evictOnClose)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.454">close</a>(boolean&nbsp;evictOnClose)
            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Close method with optional evictOnClose</div>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -516,7 +516,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataBlockEncoding</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.455">getDataBlockEncoding</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.456">getDataBlockEncoding</a>()</pre>
 </li>
 </ul>
 <a name="hasMVCCInfo()">
@@ -525,7 +525,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hasMVCCInfo</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.457">hasMVCCInfo</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.458">hasMVCCInfo</a>()</pre>
 </li>
 </ul>
 <a name="getFileContext()">
@@ -534,7 +534,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileContext</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.462">getFileContext</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.463">getFileContext</a>()</pre>
 <div class="block">Return the file context of the HFile this reader belongs to</div>
 </li>
 </ul>
@@ -544,7 +544,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isPrimaryReplicaReader</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.464">isPrimaryReplicaReader</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.465">isPrimaryReplicaReader</a>()</pre>
 </li>
 </ul>
 <a name="setPrimaryReplicaReader(boolean)">
@@ -553,7 +553,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setPrimaryReplicaReader</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.466">setPrimaryReplicaReader</a>(boolean&nbsp;isPrimaryReplicaReader)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.467">setPrimaryReplicaReader</a>(boolean&nbsp;isPrimaryReplicaReader)</pre>
 </li>
 </ul>
 <a name="shouldIncludeMemstoreTS()">
@@ -562,7 +562,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldIncludeMemstoreTS</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.468">shouldIncludeMemstoreTS</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.469">shouldIncludeMemstoreTS</a>()</pre>
 </li>
 </ul>
 <a name="isDecodeMemstoreTS()">
@@ -571,7 +571,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isDecodeMemstoreTS</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.470">isDecodeMemstoreTS</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.471">isDecodeMemstoreTS</a>()</pre>
 </li>
 </ul>
 <a name="getEffectiveEncodingInCache(boolean)">
@@ -580,7 +580,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getEffectiveEncodingInCache</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.472">getEffectiveEncodingInCache</a>(boolean&nbsp;isCompaction)</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.473">getEffectiveEncodingInCache</a>(boolean&nbsp;isCompaction)</pre>
 </li>
 </ul>
 <a name="getUncachedBlockReader()">
@@ -589,7 +589,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getUncachedBlockReader</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.475">getUncachedBlockReader</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.476">getUncachedBlockReader</a>()</pre>
 </li>
 </ul>
 <a name="prefetchComplete()">
@@ -598,7 +598,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>prefetchComplete</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.478">prefetchComplete</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.479">prefetchComplete</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
index b77cb21..ee8776c 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
@@ -99,7 +99,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.198">HFile.Writer</a>
+<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.199">HFile.Writer</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a></pre>
 <div class="block">API required to write an <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a></div>
 </li>
@@ -215,7 +215,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MAX_MEMSTORE_TS_KEY</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.200">MAX_MEMSTORE_TS_KEY</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.201">MAX_MEMSTORE_TS_KEY</a></pre>
 <div class="block">Max memstore (mvcc) timestamp in FileInfo</div>
 </li>
 </ul>
@@ -233,7 +233,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>appendFileInfo</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.203">appendFileInfo</a>(byte[]&nbsp;key,
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.204">appendFileInfo</a>(byte[]&nbsp;key,
                   byte[]&nbsp;value)
                     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Add an element to the file info map.</div>
@@ -247,7 +247,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>append</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.205">append</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.206">append</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
@@ -259,7 +259,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getPath</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.208">getPath</a>()</pre>
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.209">getPath</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the path to this <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a></dd></dl>
 </li>
 </ul>
@@ -269,7 +269,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>addInlineBlockWriter</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.214">addInlineBlockWriter</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.html" title="interface in org.apache.hadoop.hbase.io.hfile">InlineBlockWriter</a>&nbsp;bloomWriter)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.215">addInlineBlockWriter</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.html" title="interface in org.apache.hadoop.hbase.io.hfile">InlineBlockWriter</a>&nbsp;bloomWriter)</pre>
 <div class="block">Adds an inline block writer such as a multi-level block index writer or
  a compound Bloom filter writer.</div>
 </li>
@@ -280,7 +280,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>appendMetaBlock</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.221">appendMetaBlock</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;bloomFilterMetaKey,
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.222">appendMetaBlock</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;bloomFilterMetaKey,
                    org.apache.hadoop.io.Writable&nbsp;metaWriter)</pre>
 </li>
 </ul>
@@ -290,7 +290,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>addGeneralBloomFilter</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.228">addGeneralBloomFilter</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/BloomFilterWriter.html" title="interface in org.apache.hadoop.hbase.util">BloomFilterWriter</a>&nbsp;bfw)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.229">addGeneralBloomFilter</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/BloomFilterWriter.html" title="interface in org.apache.hadoop.hbase.util">BloomFilterWriter</a>&nbsp;bfw)</pre>
 <div class="block">Store general Bloom filter in the file. This does not deal with Bloom filter
  internals but is necessary, since Bloom filters are stored differently
  in HFile version 1 and version 2.</div>
@@ -302,7 +302,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockList">
 <li class="blockList">
 <h4>addDeleteFamilyBloomFilter</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.234">addDeleteFamilyBloomFilter</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/BloomFilterWriter.html" title="interface in org.apache.hadoop.hbase.util">BloomFilterWriter</a>&nbsp;bfw)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.235">addDeleteFamilyBloomFilter</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/BloomFilterWriter.html" title="interface in org.apache.hadoop.hbase.util">BloomFilterWriter</a>&nbsp;bfw)
                                 throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Store delete family Bloom filter in the file, which is only supported in
  HFile V2.</div>
@@ -316,7 +316,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getFileContext</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.239">getFileContext</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#line.240">getFileContext</a>()</pre>
 <div class="block">Return the file context for the HFile this writer belongs to</div>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
index df8b2db..258dca0 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
@@ -99,7 +99,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.246">HFile.WriterFactory</a>
+<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.247">HFile.WriterFactory</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">This variety of ways to construct writers is used throughout the code, and
  we want to be able to swap writer implementations.</div>
@@ -247,7 +247,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>protected final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.247">conf</a></pre>
+<pre>protected final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.248">conf</a></pre>
 </li>
 </ul>
 <a name="cacheConf">
@@ -256,7 +256,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheConf</h4>
-<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.248">cacheConf</a></pre>
+<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.249">cacheConf</a></pre>
 </li>
 </ul>
 <a name="fs">
@@ -265,7 +265,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>fs</h4>
-<pre>protected&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.249">fs</a></pre>
+<pre>protected&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.250">fs</a></pre>
 </li>
 </ul>
 <a name="path">
@@ -274,7 +274,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>path</h4>
-<pre>protected&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.250">path</a></pre>
+<pre>protected&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.251">path</a></pre>
 </li>
 </ul>
 <a name="ostream">
@@ -283,7 +283,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ostream</h4>
-<pre>protected&nbsp;org.apache.hadoop.fs.FSDataOutputStream <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.251">ostream</a></pre>
+<pre>protected&nbsp;org.apache.hadoop.fs.FSDataOutputStream <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.252">ostream</a></pre>
 </li>
 </ul>
 <a name="comparator">
@@ -292,7 +292,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>comparator</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.252">comparator</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.253">comparator</a></pre>
 </li>
 </ul>
 <a name="favoredNodes">
@@ -301,7 +301,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>favoredNodes</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true" title="class or interface in java.net">InetSocketAddress</a>[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.254">favoredNodes</a></pre>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true" title="class or interface in java.net">InetSocketAddress</a>[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.255">favoredNodes</a></pre>
 </li>
 </ul>
 <a name="fileContext">
@@ -310,7 +310,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>fileContext</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.255">fileContext</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.256">fileContext</a></pre>
 </li>
 </ul>
 <a name="shouldDropBehind">
@@ -319,7 +319,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>shouldDropBehind</h4>
-<pre>protected&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.256">shouldDropBehind</a></pre>
+<pre>protected&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.257">shouldDropBehind</a></pre>
 </li>
 </ul>
 </li>
@@ -336,7 +336,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFile.WriterFactory</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.258">HFile.WriterFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.259">HFile.WriterFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                    <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
 </li>
 </ul>
@@ -354,7 +354,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>withPath</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.263">withPath</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.264">withPath</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                            org.apache.hadoop.fs.Path&nbsp;path)</pre>
 </li>
 </ul>
@@ -364,7 +364,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>withOutputStream</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.271">withOutputStream</a>(org.apache.hadoop.fs.FSDataOutputStream&nbsp;ostream)</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.272">withOutputStream</a>(org.apache.hadoop.fs.FSDataOutputStream&nbsp;ostream)</pre>
 </li>
 </ul>
 <a name="withComparator(org.apache.hadoop.hbase.CellComparator)">
@@ -373,7 +373,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>withComparator</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.277">withComparator</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator)</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.278">withComparator</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator)</pre>
 </li>
 </ul>
 <a name="withFavoredNodes(java.net.InetSocketAddress[])">
@@ -382,7 +382,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>withFavoredNodes</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.283">withFavoredNodes</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true" title="class or interface in java.net">InetSocketAddress</a>[]&nbsp;favoredNodes)</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.284">withFavoredNodes</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true" title="class or interface in java.net">InetSocketAddress</a>[]&nbsp;favoredNodes)</pre>
 </li>
 </ul>
 <a name="withFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)">
@@ -391,7 +391,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>withFileContext</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.289">withFileContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext)</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.290">withFileContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext)</pre>
 </li>
 </ul>
 <a name="withShouldDropCacheBehind(boolean)">
@@ -400,7 +400,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>withShouldDropCacheBehind</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.294">withShouldDropCacheBehind</a>(boolean&nbsp;shouldDropBehind)</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.295">withShouldDropCacheBehind</a>(boolean&nbsp;shouldDropBehind)</pre>
 </li>
 </ul>
 <a name="create()">
@@ -409,7 +409,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>create</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Writer.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Writer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.300">create</a>()
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Writer.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Writer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#line.301">create</a>()
                     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
index de952bf..7fb6887 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.html
@@ -100,39 +100,48 @@
 <hr>
 <br>
 <pre><a href="../../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Private</a>
-public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.89">HFileBlock</a>
+public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.100">HFileBlock</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a></pre>
-<div class="block">Reading <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and 2 blocks, and writing version 2 blocks.
- <ul>
- <li>In version 1 all blocks are always compressed or uncompressed, as
+<div class="block">Reads <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and version 2 blocks but writes version 2 blocks only.
+ Version 2 was introduced in hbase-0.92.0. It handles reading from and writing to the
+ filesystem, and also reading blocks from and writing blocks to the cache.
+
+ <h3>HFileBlock: Version 1</h3>
+ As of this writing, there should be no more version 1 blocks found in the wild; version 2
+ has been written since hbase-0.92.0.
+ In version 1 all blocks are always compressed or uncompressed, as
  specified by the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>'s compression algorithm, with a type-specific
  magic record stored in the beginning of the compressed data (i.e. one needs
  to uncompress the compressed block to determine the block type). There is
  only a single compression algorithm setting for all blocks. Offset and size
  information from the block index are required to read a block.
- <li>In version 2 a block is structured as follows:
+ <h3>HFileBlock: Version 2</h3>
+ In version 2, a block is structured as follows:
  <ul>
- <li>header (see Writer#finishBlock())
+ <li><b>Header:</b> See Writer#putHeader(); header total size is HFILEBLOCK_HEADER_SIZE
  <ul>
- <li>Magic record identifying the block type (8 bytes)
- <li>Compressed block size, excluding header, including checksum (4 bytes)
- <li>Uncompressed block size, excluding header, excluding checksum (4 bytes)
+ <li>Magic record identifying the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><code>BlockType</code></a> (8 bytes): e.g. <code>DATABLK*</code>
+ <li>Compressed -- a.k.a. 'on disk' -- block size, excluding header, but including
+     trailing checksum bytes (4 bytes)
+ <li>Uncompressed block size, excluding header, and excluding checksum bytes (4 bytes)
  <li>The offset of the previous block of the same type (8 bytes). This is
- used to be able to navigate to the previous block without going to the block
+ used to navigate to the previous block without having to go to the block index
  <li>For minorVersions &gt;=1, the ordinal describing checksum type (1 byte)
  <li>For minorVersions &gt;=1, the number of data bytes/checksum chunk (4 bytes)
- <li>For minorVersions &gt;=1, the size of data on disk, including header,
+ <li>For minorVersions &gt;=1, the size of data 'on disk', including header,
  excluding checksums (4 bytes)
  </ul>
  </li>
- <li>Raw/Compressed/Encrypted/Encoded data. The compression algorithm is the
+ <li><b>Raw/Compressed/Encrypted/Encoded data:</b> The compression algorithm is the
  same for all the blocks in the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>, similarly to what was done in
- version 1.
- <li>For minorVersions &gt;=1, a series of 4 byte checksums, one each for
+ version 1. If compression is NONE, this is just raw, serialized Cells.
+ <li><b>Tail:</b> For minorVersions &gt;=1, a series of 4 byte checksums, one each for
  the number of bytes specified by bytesPerChecksum.
  </ul>
- </ul></div>
+ <p>Be aware that when we read from HDFS, we overread, pulling in the next block's header too.
+ We do this to save having to do two seeks to read an HFileBlock; a seek to read the header
+ to figure lengths, etc., and then another seek to pull in the data.</div>
 </li>
 </ul>
 </div>
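
To make the version 2 layout enumerated above concrete, here is a small illustrative decoder for the fixed header fields, written with plain java.nio and hypothetical names (V2BlockHeaderSketch is not an HBase class). It follows the byte widths listed in the description: 8 + 4 + 4 + 8 + 1 + 4 + 4 = 33 bytes when HBase checksums are in use.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

/** Illustrative decoder for the fixed fields of a version-2 HFile block header. */
public final class V2BlockHeaderSketch {
  // 8 (magic) + 4 (onDiskSizeWithoutHeader) + 4 (uncompressedSizeWithoutHeader)
  // + 8 (prevBlockOffset) + 1 (checksumType) + 4 (bytesPerChecksum)
  // + 4 (onDiskDataSizeWithHeader) = 33 bytes when HBase checksums are enabled.
  static final int HEADER_SIZE_WITH_CHECKSUMS = 33;

  public static void decode(ByteBuffer header) {
    byte[] magic = new byte[8];
    header.get(magic);                                    // e.g. "DATABLK*" for a data block
    int onDiskSizeWithoutHeader = header.getInt();        // includes trailing checksums
    int uncompressedSizeWithoutHeader = header.getInt();  // excludes checksums
    long prevBlockOffset = header.getLong();              // previous block of the same type
    byte checksumType = header.get();                     // minorVersion >= 1 only
    int bytesPerChecksum = header.getInt();               // data bytes covered per checksum
    int onDiskDataSizeWithHeader = header.getInt();       // header + data, no checksums

    System.out.printf("type=%s onDisk=%d uncompressed=%d prev=%d "
            + "checksumType=%d bytesPerChecksum=%d onDiskData=%d%n",
        new String(magic, StandardCharsets.US_ASCII), onDiskSizeWithoutHeader,
        uncompressedSizeWithoutHeader, prevBlockOffset, checksumType,
        bytesPerChecksum, onDiskDataSizeWithHeader);
  }

  public static void main(String[] args) {
    // Build a sample header with made-up sizes; prevBlockOffset of -1 means "no previous block".
    ByteBuffer sample = ByteBuffer.allocate(HEADER_SIZE_WITH_CHECKSUMS);
    sample.put("DATABLK*".getBytes(StandardCharsets.US_ASCII));
    sample.putInt(4096).putInt(8192).putLong(-1L);
    sample.put((byte) 1).putInt(16 * 1024).putInt(4096 + HEADER_SIZE_WITH_CHECKSUMS);
    sample.flip();
    decode(sample);
  }
}

Per the minorVersion bullets above, the last three fields are absent for pre-checksum blocks, which is what leaves the shorter 24-byte header.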
@@ -152,19 +161,19 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <th class="colLast" scope="col">Class and Description</th>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>static interface&nbsp;</code></td>
+<td class="colFirst"><code>(package private) static interface&nbsp;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockIterator</a></strong></code>
 <div class="block">An interface allowing to iterate <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock</code></a>s.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>static interface&nbsp;</code></td>
+<td class="colFirst"><code>(package private) static interface&nbsp;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockWritable</a></strong></code>
 <div class="block">Something that can be written into a block.</div>
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>static interface&nbsp;</code></td>
+<td class="colFirst"><code>(package private) static interface&nbsp;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a></strong></code>
 <div class="block">A full-fledged reader with iteration ability.</div>
 </td>
@@ -183,7 +192,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>static class&nbsp;</code></td>
+<td class="colFirst"><code>(package private) static class&nbsp;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer</a></strong></code>
 <div class="block">Unified version 2 <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> block writer.</div>
 </td>
@@ -212,7 +221,9 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>(package private) static <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
-<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#blockDeserializer">blockDeserializer</a></strong></code>&nbsp;</td>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#blockDeserializer">blockDeserializer</a></strong></code>
+<div class="block">Used deserializing blocks from Cache.</div>
+</td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a></code></td>
@@ -235,9 +246,8 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <tr class="altColor">
 <td class="colFirst"><code>(package private) static int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD">CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD</a></strong></code>
-<div class="block">On a checksum failure on a Reader, these many suceeding read
- requests switch back to using hdfs checksums before auto-reenabling
- hbase checksum verification.</div>
+<div class="block">On a checksum failure, do these many succeeding read requests using hdfs checksums before
+ auto-reenabling hbase checksum verification.</div>
 </td>
 </tr>
 <tr class="rowColor">
@@ -254,24 +264,24 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>static int</code></td>
-<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#ENCODED_HEADER_SIZE">ENCODED_HEADER_SIZE</a></strong></code>
-<div class="block">The size of block header when blockType is <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a>.</div>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#EXTRA_SERIALIZATION_SPACE">EXTRA_SERIALIZATION_SPACE</a></strong></code>
+<div class="block">See #blockDeserializer method for more info.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>static int</code></td>
-<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#EXTRA_SERIALIZATION_SPACE">EXTRA_SERIALIZATION_SPACE</a></strong></code>&nbsp;</td>
-</tr>
-<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#fileContext">fileContext</a></strong></code>
 <div class="block">Meta data that holds meta information on the hfileblock</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#FILL_HEADER">FILL_HEADER</a></strong></code>&nbsp;</td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#LOG">LOG</a></strong></code>&nbsp;</td>
+</tr>
 <tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#memType">memType</a></strong></code>&nbsp;</td>
@@ -318,6 +328,10 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <div class="block">Size of pure data.</div>
 </td>
 </tr>
+<tr class="altColor">
+<td class="colFirst"><code>private static int</code></td>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#UNSET">UNSET</a></strong></code>&nbsp;</td>
+</tr>
 </table>
 </li>
 </ul>
@@ -410,157 +424,153 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#equals(java.lang.Object)">equals</a></strong>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;comparison)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>void</code></td>
-<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#expectType(org.apache.hadoop.hbase.io.hfile.BlockType)">expectType</a></strong>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;expectedType)</code>&nbsp;</td>
-</tr>
-<tr class="altColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getBlockType()">getBlockType</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a></code></td>
+<tr class="altColor">
+<td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getBufferReadOnly()">getBufferReadOnly</a></strong>()</code>
 <div class="block">Returns the buffer this block stores internally.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getBufferReadOnlyWithHeader()">getBufferReadOnlyWithHeader</a></strong>()</code>
 <div class="block">Returns the buffer of this block, including header data.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getBufferWithHeader()">getBufferWithHeader</a></strong>()</code>
 <div class="block">Returns a byte buffer of this block, including header data and checksum, positioned at
  the beginning of header.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getBufferWithoutHeader()">getBufferWithoutHeader</a></strong>()</code>
 <div class="block">Returns a buffer that does not include the header or checksum.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getBytesPerChecksum()">getBytesPerChecksum</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
-<td class="colFirst"><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a></code></td>
+<tr class="rowColor">
+<td class="colFirst"><code>(package private) <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getByteStream()">getByteStream</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) byte</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getChecksumType()">getChecksumType</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getDataBlockEncoding()">getDataBlockEncoding</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>short</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getDataBlockEncodingId()">getDataBlockEncodingId</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getDeserializer()">getDeserializer</a></strong>()</code>
 <div class="block">Returns CacheableDeserializer instance which reconstructs original object from ByteBuffer.</div>
 </td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code>byte[]</code></td>
+<tr class="altColor">
+<td class="colFirst"><code>(package private) byte[]</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getDummyHeaderForVersion()">getDummyHeaderForVersion</a></strong>()</code>
 <div class="block">Return the appropriate DUMMY_HEADER for the minor version</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static byte[]</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getDummyHeaderForVersion(boolean)">getDummyHeaderForVersion</a></strong>(boolean&nbsp;usesHBaseChecksum)</code>
 <div class="block">Return the appropriate DUMMY_HEADER for the minor version</div>
 </td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a></code></td>
+<tr class="altColor">
+<td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getHFileContext()">getHFileContext</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getMemoryType()">getMemoryType</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getNextBlockOnDiskSizeWithHeader()">getNextBlockOnDiskSizeWithHeader</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
-<td class="colFirst"><code>long</code></td>
+<tr class="rowColor">
+<td class="colFirst"><code>(package private) long</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getOffset()">getOffset</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getOnDiskDataSizeWithHeader()">getOnDiskDataSizeWithHeader</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getOnDiskSizeWithHeader()">getOnDiskSizeWithHeader</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code>int</code></td>
+<tr class="altColor">
+<td class="colFirst"><code>(package private) int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getOnDiskSizeWithoutHeader()">getOnDiskSizeWithoutHeader</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
-<td class="colFirst"><code>long</code></td>
+<tr class="rowColor">
+<td class="colFirst"><code>(package private) long</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getPrevBlockOffset()">getPrevBlockOffset</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getSerializedLength()">getSerializedLength</a></strong>()</code>
 <div class="block">Returns the length of the ByteBuffer required to serialized the object.</div>
 </td>
 </tr>
-<tr class="altColor">
-<td class="colFirst"><code>int</code></td>
+<tr class="rowColor">
+<td class="colFirst"><code>(package private) int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getUncompressedSizeWithoutHeader()">getUncompressedSizeWithoutHeader</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#hashCode()">hashCode</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#hasNextBlockHeader()">hasNextBlockHeader</a></strong>()</code>
 <div class="block">Return true when this buffer includes next block's header.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#headerSize()">headerSize</a></strong>()</code>
 <div class="block">Returns the size of this block header.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#headerSize(boolean)">headerSize</a></strong>(boolean&nbsp;usesHBaseChecksum)</code>
 <div class="block">Maps a minor version to the size of the header.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#heapSize()">heapSize</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#isUnpacked()">isUnpacked</a></strong>()</code>
 <div class="block">Return true when this block's buffer has been unpacked, false otherwise.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#overwriteHeader()">overwriteHeader</a></strong>()</code>
 <div class="block">Rewinds <code>buf</code> and writes first 4 header fields.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static boolean</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#positionalReadWithExtra(org.apache.hadoop.fs.FSDataInputStream,%20long,%20byte[],%20int,%20int,%20int)">positionalReadWithExtra</a></strong>(org.apache.hadoop.fs.FSDataInputStream&nbsp;in,
                                               long&nbsp;position,
@@ -568,75 +578,79 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
                                               int&nbsp;bufOffset,
                                               int&nbsp;necessaryLen,
                                               int&nbsp;extraLen)</code>
-<div class="block">Read from an input stream.</div>
+<div class="block">Read from an input stream at least <code>necessaryLen</code> and if possible,
+ <code>extraLen</code> also if available.</div>
 </td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code>static boolean</code></td>
+<tr class="altColor">
+<td class="colFirst"><code>(package private) static boolean</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#readWithExtra(java.io.InputStream,%20byte[],%20int,%20int,%20int)">readWithExtra</a></strong>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a>&nbsp;in,
                           byte[]&nbsp;buf,
                           int&nbsp;bufOffset,
                           int&nbsp;necessaryLen,
                           int&nbsp;extraLen)</code>
-<div class="block">Read from an input stream.</div>
+<div class="block">Read from an input stream at least <code>necessaryLen</code> and if possible,
+ <code>extraLen</code> also if available.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#sanityCheck()">sanityCheck</a></strong>()</code>
 <div class="block">Checks if the block is internally consistent, i.e.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#sanityCheckAssertion(org.apache.hadoop.hbase.io.hfile.BlockType,%20org.apache.hadoop.hbase.io.hfile.BlockType)">sanityCheckAssertion</a></strong>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;valueFromBuf,
                                         <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;valueFromField)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#sanityCheckAssertion(long,%20long,%20java.lang.String)">sanityCheckAssertion</a></strong>(long&nbsp;valueFromBuf,
                                         long&nbsp;valueFromField,
                                         <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;fieldName)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#serialize(java.nio.ByteBuffer)">serialize</a></strong>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;destination)</code>
 <div class="block">Serializes its data into destination.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>void</code></td>
-<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#serializeExtraInfo(java.nio.ByteBuffer)">serializeExtraInfo</a></strong>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;destination)</code>&nbsp;</td>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#serializeExtraInfo(java.nio.ByteBuffer)">serializeExtraInfo</a></strong>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;destination)</code>
+<div class="block">Write out the content of EXTRA_SERIALIZATION_SPACE.</div>
+</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#toString()">toString</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#toStringHeader(org.apache.hadoop.hbase.nio.ByteBuff)">toStringHeader</a></strong>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buf)</code>
 <div class="block">Convert the contents of the block header into a human readable string.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#totalChecksumBytes()">totalChecksumBytes</a></strong>()</code>
-<div class="block">Calcuate the number of bytes required to store all the checksums
+<div class="block">Calculate the number of bytes required to store all the checksums
  for this block.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#unpack(org.apache.hadoop.hbase.io.hfile.HFileContext,%20org.apache.hadoop.hbase.io.hfile.HFileBlock.FSReader)">unpack</a></strong>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext,
             <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>&nbsp;reader)</code>
 <div class="block">Retrieves the decompressed/decrypted view of this block.</div>
 </td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code>boolean</code></td>
+<tr class="altColor">
+<td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#usesSharedMemory()">usesSharedMemory</a></strong>()</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#validateOnDiskSizeWithoutHeader(int)">validateOnDiskSizeWithoutHeader</a></strong>(int&nbsp;expectedOnDiskSizeWithoutHeader)</code>
 <div class="block">Called after reading a block with provided onDiskSizeWithHeader.</div>
@@ -664,58 +678,54 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <!--   -->
 </a>
 <h3>Field Detail</h3>
-<a name="CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD">
+<a name="LOG">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD</h4>
-<pre>static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.96">CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD</a></pre>
-<div class="block">On a checksum failure on a Reader, these many suceeding read
- requests switch back to using hdfs checksums before auto-reenabling
- hbase checksum verification.</div>
-<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileBlock.CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD">Constant Field Values</a></dd></dl>
+<h4>LOG</h4>
+<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.101">LOG</a></pre>
 </li>
 </ul>
-<a name="FILL_HEADER">
+<a name="CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>FILL_HEADER</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.98">FILL_HEADER</a></pre>
-<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileBlock.FILL_HEADER">Constant Field Values</a></dd></dl>
+<h4>CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD</h4>
+<pre>static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.107">CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD</a></pre>
+<div class="block">On a checksum failure, do these many succeeding read requests using hdfs checksums before
+ auto-reenabling hbase checksum verification.</div>
+<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileBlock.CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD">Constant Field Values</a></dd></dl>
 </li>
 </ul>
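The threshold above drives a simple fallback: after an HBase checksum failure, the next reads go through HDFS checksums before HBase checksum verification is re-enabled. A minimal sketch of that counter pattern, with illustrative names only (this is not the actual HFileBlock or reader code):

    // Sketch only: illustrates the switch-back behaviour described above.
    class ChecksumFallbackSketch {
      static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3; // value assumed for the sketch
      private int hdfsChecksumReadsLeft = 0;                       // hypothetical counter

      void onHBaseChecksumFailure() {
        // Serve the next N reads with HDFS checksums.
        hdfsChecksumReadsLeft = CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD;
      }

      boolean useHBaseChecksum() {
        if (hdfsChecksumReadsLeft > 0) {
          hdfsChecksumReadsLeft--;   // still inside the fallback window
          return false;              // verify via HDFS checksums instead
        }
        return true;                 // auto-reenable HBase checksum verification
      }
    }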
-<a name="DONT_FILL_HEADER">
+<a name="UNSET">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>DONT_FILL_HEADER</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.99">DONT_FILL_HEADER</a></pre>
-<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileBlock.DONT_FILL_HEADER">Constant Field Values</a></dd></dl>
+<h4>UNSET</h4>
+<pre>private static&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.109">UNSET</a></pre>
 </li>
 </ul>
-<a name="ENCODED_HEADER_SIZE">
+<a name="FILL_HEADER">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>ENCODED_HEADER_SIZE</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.105">ENCODED_HEADER_SIZE</a></pre>
-<div class="block">The size of block header when blockType is <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a>.
- This extends normal header by adding the id of encoder.</div>
-<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileBlock.ENCODED_HEADER_SIZE">Constant Field Values</a></dd></dl>
+<h4>FILL_HEADER</h4>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.110">FILL_HEADER</a></pre>
+<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileBlock.FILL_HEADER">Constant Field Values</a></dd></dl>
 </li>
 </ul>
-<a name="DUMMY_HEADER_NO_CHECKSUM">
+<a name="DONT_FILL_HEADER">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>DUMMY_HEADER_NO_CHECKSUM</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.108">DUMMY_HEADER_NO_CHECKSUM</a></pre>
+<h4>DONT_FILL_HEADER</h4>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.111">DONT_FILL_HEADER</a></pre>
+<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileBlock.DONT_FILL_HEADER">Constant Field Values</a></dd></dl>
 </li>
 </ul>
 <a name="MULTI_BYTE_BUFFER_HEAP_SIZE">
@@ -724,7 +734,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>MULTI_BYTE_BUFFER_HEAP_SIZE</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.112">MULTI_BYTE_BUFFER_HEAP_SIZE</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.114">MULTI_BYTE_BUFFER_HEAP_SIZE</a></pre>
 </li>
 </ul>
 <a name="EXTRA_SERIALIZATION_SPACE">
@@ -733,7 +743,12 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>EXTRA_SERIALIZATION_SPACE</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.116">EXTRA_SERIALIZATION_SPACE</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.124">EXTRA_SERIALIZATION_SPACE</a></pre>
+<div class="block">See #blockDeserializer method for more info.
+ 13 bytes of extra stuff stuck on the end of the HFileBlock that we pull in from HDFS (note,
+ when we read from HDFS, we pull in an HFileBlock AND the header of the next block if one).
+ The 13 bytes are: usesHBaseChecksum (1 byte) + offset of this block (long) +
+ nextBlockOnDiskSizeWithHeader (int).</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileBlock.EXTRA_SERIALIZATION_SPACE">Constant Field Values</a></dd></dl>
 </li>
 </ul>
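Those 13 bytes break down as 1 (boolean) + 8 (long) + 4 (int). A hedged sketch of appending that tail to a cache buffer; the class, method, and parameter names here are illustrative, not the actual serializeExtraInfo implementation:

    import java.nio.ByteBuffer;

    // Sketch only: writes the 13-byte cache metadata tail described above.
    final class ExtraSerializationSketch {
      static final int EXTRA_SERIALIZATION_SPACE = 1 + Long.BYTES + Integer.BYTES; // 13 bytes

      static void writeExtraInfo(ByteBuffer dest, boolean usesHBaseChecksum,
          long blockOffset, int nextBlockOnDiskSizeWithHeader) {
        dest.put((byte) (usesHBaseChecksum ? 1 : 0)); // 1 byte: checksum flag
        dest.putLong(blockOffset);                    // 8 bytes: offset of this block in the file
        dest.putInt(nextBlockOnDiskSizeWithHeader);   // 4 bytes: on-disk size of next block with header
      }
    }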
@@ -743,18 +758,48 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>CHECKSUM_SIZE</h4>
-<pre>static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.122">CHECKSUM_SIZE</a></pre>
+<pre>static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.130">CHECKSUM_SIZE</a></pre>
 <div class="block">Each checksum value is an integer that can be stored in 4 bytes.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileBlock.CHECKSUM_SIZE">Constant Field Values</a></dd></dl>
 </li>
 </ul>
+<a name="DUMMY_HEADER_NO_CHECKSUM">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>DUMMY_HEADER_NO_CHECKSUM</h4>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.132">DUMMY_HEADER_NO_CHECKSUM</a></pre>
+</li>
+</ul>
 <a name="blockDeserializer">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>blockDeserializer</h4>
-<pre>static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.124">blockDeserializer</a></pre>
+<pre>static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.158">blockDeserializer</a></pre>
+<div class="block">Used deserializing blocks from Cache.
+
+ Serializing to cache is a little hard to follow. See Writer#finishBlock for where it is done.
+ When we start to append to a new HFileBlock,
+ we skip over where the header should go before we start adding Cells. When the block is
+ done, we'll then go back and fill in the header and the checksum tail. Be aware that what
+ gets serialized into the blockcache is a byte array that contains an HFileBlock followed by
+ its checksums and then the header of the next HFileBlock (needed to help navigate), followed
+ again by an extra 13 bytes of meta info needed when time to recreate the HFileBlock from cache.
+
+ ++++++++++++++
+ + HFileBlock +
+ ++++++++++++++
+ + Checksums  +
+ ++++++++++++++
+ + NextHeader +
+ ++++++++++++++
+ + ExtraMeta! +
+ ++++++++++++++
+
+ TODO: Fix it so we do NOT put the NextHeader into blockcache. It is not necessary.</div>
 </li>
 </ul>
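Given that layout, recreating a block from the cache means peeling the 13-byte tail off the end of the cached byte array first. A rough sketch under those assumptions; the names are made up for illustration, see the real blockDeserializer for the actual logic:

    import java.nio.ByteBuffer;

    // Sketch only: reads the trailing metadata of a cached block per the diagram above.
    final class CachedBlockTailSketch {
      static final int EXTRA = 1 + Long.BYTES + Integer.BYTES; // 13 bytes of trailing metadata

      static void readTail(ByteBuffer cached) {
        ByteBuffer tail = cached.duplicate();
        tail.position(cached.limit() - EXTRA);
        boolean usesHBaseChecksum = tail.get() != 0;
        long offset = tail.getLong();
        int nextBlockOnDiskSizeWithHeader = tail.getInt();
        // Everything before the tail is the HFileBlock, its checksums, and the next block's
        // header; a real deserializer wraps those bytes (minus the tail) into a new HFileBlock.
        System.out.println("offset=" + offset + ", usesHBaseChecksum=" + usesHBaseChecksum
            + ", nextBlockOnDiskSizeWithHeader=" + nextBlockOnDiskSizeWithHeader);
      }
    }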
 <a name="deserializerIdentifier">
@@ -763,7 +808,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>deserializerIdentifier</h4>
-<pre>private static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.161">deserializerIdentifier</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.198">deserializerIdentifier</a></pre>
 </li>
 </ul>
 <a name="blockType">
@@ -772,7 +817,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>blockType</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.168">blockType</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.205">blockType</a></pre>
 <div class="block">Type of block. Header field 0.</div>
 </li>
 </ul>
@@ -782,8 +827,9 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>onDiskSizeWithoutHeader</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.171">onDiskSizeWithoutHeader</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.211">onDiskSizeWithoutHeader</a></pre>
 <div class="block">Size on disk excluding header, including checksum. Header field 1.</div>
+<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#putHeader(byte[],%20int,%20int,%20int,%20int)"><code>HFileBlock.Writer.putHeader(byte[], int, int, int, int)</code></a></dd></dl>
 </li>
 </ul>
 <a name="uncompressedSizeWithoutHeader">
@@ -792,8 +838,9 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>uncompressedSizeWithoutHeader</h4>
-<pre>private final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.174">uncompressedSizeWithoutHeader</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.217">uncompressedSizeWithoutHeader</a></pre>
 <div class="block">Size of pure data. Does not include header or checksums. Header field 2.</div>
+<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#putHeader(byte[],%20int,%20int,%20int,%20int)"><code>HFileBlock.Writer.putHeader(byte[], int, int, int, int)</code></a></dd></dl>
 </li>
 </ul>
 <a name="prevBlockOffset">
@@ -802,8 +849,9 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>prevBlockOffset</h4>
-<pre>private final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.177">prevBlockOffset</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.223">prevBlockOffset</a></pre>
 <div class="block">The offset of the previous block on disk. Header field 3.</div>
+<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#putHeader(byte[],%20int,%20int,%20int,%20int)"><code>HFileBlock.Writer.putHeader(byte[], int, int, int, int)</code></a></dd></dl>
 </li>
 </ul>
 <a name="onDiskDataSizeWithHeader">
@@ -812,9 +860,10 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>onDiskDataSizeWithHeader</h4>
-<pre>private final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.183">onDiskDataSizeWithHeader</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.230">onDiskDataSizeWithHeader</a></pre>
 <div class="block">Size on disk of header + data. Excludes checksum. Header field 6,
  OR calculated from <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#onDiskSizeWithoutHeader"><code>onDiskSizeWithoutHeader</code></a> when using HDFS checksum.</div>
+<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#putHeader(byte[],%20int,%20int,%20int,%20int)"><code>HFileBlock.Writer.putHeader(byte[], int, int, int, int)</code></a></dd></dl>
 </li>
 </ul>
 <a name="buf">
@@ -823,7 +872,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>buf</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.186">buf</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.233">buf</a></pre>
 <div class="block">The in-memory representation of the hfile block</div>
 </li>
 </ul>
@@ -833,7 +882,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>fileContext</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.189">fileContext</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.236">fileContext</a></pre>
 <div class="block">Meta data that holds meta information on the hfileblock</div>
 </li>
 </ul>
@@ -843,7 +892,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>offset</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.195">offset</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.242">offset</a></pre>
 <div class="block">The offset of this block in the file. Populated by the reader for
  convenience of access. This offset is not part of the block header.</div>
 </li>
@@ -854,7 +903,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>nextBlockOnDiskSizeWithHeader</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.202">nextBlockOnDiskSizeWithHeader</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.249">nextBlockOnDiskSizeWithHeader</a></pre>
 <div class="block">The on-disk size of the next block, including the header, obtained by
  peeking into the first <a href="../../../../../../org/apache/hadoop/hbase/HConstants.html#HFILEBLOCK_HEADER_SIZE"><code>HConstants.HFILEBLOCK_HEADER_SIZE</code></a> bytes of the next block's
  header, or -1 if unknown.</div>
@@ -866,7 +915,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>memType</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.204">memType</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.251">memType</a></pre>
 </li>
 </ul>
 </li>
@@ -883,7 +932,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>HFileBlock</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.222">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.269">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType,
           int&nbsp;onDiskSizeWithoutHeader,
           int&nbsp;uncompressedSizeWithoutHeader,
           long&nbsp;prevBlockOffset,
@@ -893,10 +942,10 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
           int&nbsp;onDiskDataSizeWithHeader,
           <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext)</pre>
 <div class="block">Creates a new <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> block from the given fields. This constructor
- is mostly used when the block data has already been read and uncompressed,
+ is used when the block data has already been read and uncompressed,
  and is sitting in a byte buffer.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>blockType</code> - the type of this block, see <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><code>BlockType</code></a></dd><dd><code>onDiskSizeWithoutHeader</code> - see <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#onDiskSizeWithoutHeader"><code>onDiskSizeWithoutHeader</code></a></dd><dd><code>uncompressedSizeWithoutHeader</code> - see <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#uncompressedSizeWithoutHeader"><code>uncompressedSizeWithoutHeader</code></a></dd><dd><code>prevBlockOffset</code> - see <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#prevBlockOffset"><code>prevBlockOffset</code></a></dd><dd><code>buf</code> - block header (<a href="../../../../../../org/apache/hadoop/hbase/HConstants.html#HFILEBLOCK_HEADER_SIZE"><code>HConstants.HFILEBLOCK_HEA
 DER_SIZE</code></a> bytes) followed by
-          uncompressed data. This</dd><dd><code>fillHeader</code> - when true, parse <code>buf</code> and override the first 4 header fields.</dd><dd><code>offset</code> - the file offset the block was read from</dd><dd><code>onDiskDataSizeWithHeader</code> - see <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#onDiskDataSizeWithHeader"><code>onDiskDataSizeWithHeader</code></a></dd><dd><code>fileContext</code> - HFile meta data</dd></dl>
+          uncompressed data.</dd><dd><code>fillHeader</code> - when true, write the first 4 header fields into the passed buffer.</dd><dd><code>offset</code> - the file offset the block was read from</dd><dd><code>onDiskDataSizeWithHeader</code> - see <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#onDiskDataSizeWithHeader"><code>onDiskDataSizeWithHeader</code></a></dd><dd><code>fileContext</code> - HFile meta data</dd></dl>
 </li>
 </ul>
 <a name="HFileBlock(org.apache.hadoop.hbase.io.hfile.BlockType, int, int, long, java.nio.ByteBuffer, boolean, long, int, org.apache.hadoop.hbase.io.hfile.HFileContext)">
@@ -905,7 +954,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>HFileBlock</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.238">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.286">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType,
           int&nbsp;onDiskSizeWithoutHeader,
           int&nbsp;uncompressedSizeWithoutHeader,
           long&nbsp;prevBlockOffset,
@@ -922,7 +971,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>HFileBlock</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.248">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;that)</pre>
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.296">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;that)</pre>
 <div class="block">Copy constructor. Creates a shallow copy of <code>that</code>'s buffer.</div>
 </li>
 </ul>
@@ -932,7 +981,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>HFileBlock</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.260">HFileBlock</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;b,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.308">HFileBlock</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;b,
           boolean&nbsp;usesHBaseChecksum)
      throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -945,7 +994,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>HFileBlock</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.270">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;b,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.318">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;b,
           boolean&nbsp;usesHBaseChecksum)
      throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Creates a block from an existing buffer starting with a header. Rewinds
@@ -962,7 +1011,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFileBlock</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.280">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;b,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.328">HFileBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;b,
           boolean&nbsp;usesHBaseChecksum,
           <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile">Cacheable.MemoryType</a>&nbsp;memType)
      throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -988,7 +1037,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockType</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.304">getBlockType</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.352">getBlockType</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html#getBlockType()">getBlockType</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a></code></dd>
@@ -1001,7 +1050,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataBlockEncodingId</h4>
-<pre>public&nbsp;short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.309">getDataBlockEncodingId</a>()</pre>
+<pre>public&nbsp;short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.357">getDataBlockEncodingId</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>get data block encoding id that was used to encode this block</dd></dl>
 </li>
 </ul>
@@ -1011,7 +1060,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskSizeWithHeader</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.320">getOnDiskSizeWithHeader</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.368">getOnDiskSizeWithHeader</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the on-disk size of header + data part + checksum.</dd></dl>
 </li>
 </ul>
@@ -1021,7 +1070,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskSizeWithoutHeader</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.327">getOnDiskSizeWithoutHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.375">getOnDiskSizeWithoutHeader</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the on-disk size of the data part + checksum (header excluded).</dd></dl>
 </li>
 </ul>
@@ -1031,7 +1080,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getUncompressedSizeWithoutHeader</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.334">getUncompressedSizeWithoutHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.382">getUncompressedSizeWithoutHeader</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the uncompressed size of data part (header and checksum excluded).</dd></dl>
 </li>
 </ul>
@@ -1041,7 +1090,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getPrevBlockOffset</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.342">getPrevBlockOffset</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.390">getPrevBlockOffset</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the offset of the previous block of the same type in the file, or
          -1 if unknown</dd></dl>
 </li>
@@ -1052,7 +1101,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>overwriteHeader</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.350">overwriteHeader</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.398">overwriteHeader</a>()</pre>
 <div class="block">Rewinds <code>buf</code> and writes first 4 header fields. <code>buf</code> position
  is modified as side-effect.</div>
 </li>
@@ -1063,7 +1112,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getBufferWithoutHeader</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.368">getBufferWithoutHeader</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.416">getBufferWithoutHeader</a>()</pre>
 <div class="block">Returns a buffer that does not include the header or checksum.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the buffer with header skipped and checksum omitted.</dd></dl>
 </li>
@@ -1074,7 +1123,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getBufferReadOnly</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.384">getBufferReadOnly</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.432">getBufferReadOnly</a>()</pre>
 <div class="block">Returns the buffer this block stores internally. The clients must not
  modify the buffer object. This method has to be public because it is used
  in <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CompoundBloomFilter.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>CompoundBloomFilter</code></a> to avoid object creation on every Bloom
@@ -1089,7 +1138,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getBufferReadOnlyWithHeader</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.397">getBufferReadOnlyWithHeader</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.445">getBufferReadOnlyWithHeader</a>()</pre>
 <div class="block">Returns the buffer of this block, including header data. The clients must
  not modify the buffer object. This method has to be public because it is
  used in <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketCache</code></a> to avoid buffer copy.</div>
@@ -1102,7 +1151,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getBufferWithHeader</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.408">getBufferWithHeader</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.456">getBufferWithHeader</a>()</pre>
 <div class="block">Returns a byte buffer of this block, including header data and checksum, positioned at
  the beginning of header. The underlying data array is not copied.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the byte buffer with header and checksum included</dd></dl>
@@ -1114,7 +1163,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>sanityCheckAssertion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.414">sanityCheckAssertion</a>(long&nbsp;valueFromBuf,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.462">sanityCheckAssertion</a>(long&nbsp;valueFromBuf,
                         long&nbsp;valueFromField,
                         <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;fieldName)
                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1128,7 +1177,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>sanityCheckAssertion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.422">sanityCheckAssertion</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;valueFromBuf,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.470">sanityCheckAssertion</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;valueFromBuf,
                         <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;valueFromField)
                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -1141,7 +1190,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>sanityCheck</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.437">sanityCheck</a>()
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.485">sanityCheck</a>()
            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Checks if the block is internally consistent, i.e. the first
  <a href="../../../../../../org/apache/hadoop/hbase/HConstants.html#HFILEBLOCK_HEADER_SIZE"><code>HConstants.HFILEBLOCK_HEADER_SIZE</code></a> bytes of the buffer contain a
@@ -1158,7 +1207,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.474">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.522">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -1171,7 +1220,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>validateOnDiskSizeWithoutHeader</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.517">validateOnDiskSizeWithoutHeader</a>(int&nbsp;expectedOnDiskSizeWithoutHeader)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.565">validateOnDiskSizeWithoutHeader</a>(int&nbsp;expectedOnDiskSizeWithoutHeader)
                                       throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Called after reading a block with provided onDiskSizeWithHeader.</div>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -1184,7 +1233,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>unpack</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.542">unpack</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.590">unpack</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext,
                 <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>&nbsp;reader)
             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Retrieves the decompressed/decrypted view of this block. An encoded block remains in its
@@ -1199,7 +1248,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>hasNextBlockHeader</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.584">hasNextBlockHeader</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.632">hasNextBlockHeader</a>()</pre>
 <div class="block">Return true when this buffer includes next block's header.</div>
 </li>
 </ul>
@@ -1209,7 +1258,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>allocateBuffer</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.593">allocateBuffer</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.641">allocateBuffer</a>()</pre>
 <div class="block">Always allocates a new buffer of the correct size. Copies header bytes
  from the existing buffer. Does not change header fields.
  Reserve room to keep checksum bytes too.</div>
@@ -1221,7 +1270,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>isUnpacked</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.616">isUnpacked</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.664">isUnpacked</a>()</pre>
 <div class="block">Return true when this block's buffer has been unpacked, false otherwise. Note this is a
  calculated heuristic, not tracked attribute of the block.</div>
 </li>
@@ -1232,33 +1281,20 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>assumeUncompressed</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.625">assumeUncompressed</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.673">assumeUncompressed</a>()
                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">An additional sanity-check in case no compression or encryption is being used.</div>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
 </li>
 </ul>
-<a name="expectType(org.apache.hadoop.hbase.io.hfile.BlockType)">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>expectType</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.639">expectType</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;expectedType)
-                throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<dl><dt><span class="strong">Parameters:</span></dt><dd><code>expectedType</code> - the expected type of this block</dd>
-<dt><span class="strong">Throws:</span></dt>
-<dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if this block's type is different than expected</dd></dl>
-</li>
-</ul>
 <a name="getOffset()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>getOffset</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.647">getOffset</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.684">getOffset</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the offset of this block in the file it was read from</dd></dl>
 </li>
 </ul>
@@ -1268,7 +1304,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getByteStream</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.658">getByteStream</a>()</pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.695">getByteStream</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>a byte stream reading the data + checksum of this block</dd></dl>
 </li>
 </ul>
@@ -1278,7 +1314,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.665">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.702">heapSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize()">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
@@ -1292,18 +1328,17 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>readWithExtra</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.702">readWithExtra</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a>&nbsp;in,
+<pre>static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.738">readWithExtra</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a>&nbsp;in,
                     byte[]&nbsp;buf,
                     int&nbsp;bufOffset,
                     int&nbsp;necessaryLen,
                     int&nbsp;extraLen)
-                             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">Read from an input stream. Analogous to
+                      throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Read from an input stream at least <code>necessaryLen</code> and if possible,
+ <code>extraLen</code> also if available. Analogous to
  <code>IOUtils.readFully(InputStream, byte[], int, int)</code>, but specifies a
- number of "extra" bytes that would be desirable but not absolutely
- necessary to read.</div>
-<dl><dt><span class="strong">Parameters:</span></dt><dd><code>in</code> - the input stream to read from</dd><dd><code>buf</code> - the buffer to read into</dd><dd><code>bufOffset</code> - the destination offset in the buffer</dd><dd><code>necessaryLen</code> - the number of bytes that are absolutely necessary to
-          read</dd><dd><code>extraLen</code> - the number of extra bytes that would be nice to read</dd>
+ number of "extra" bytes to also optionally read.</div>
+<dl><dt><span class="strong">Parameters:</span></dt><dd><code>in</code> - the input stream to read from</dd><dd><code>buf</code> - the buffer to read into</dd><dd><code>bufOffset</code> - the destination offset in the buffer</dd><dd><code>necessaryLen</code> - the number of bytes that are absolutely necessary to read</dd><dd><code>extraLen</code> - the number of extra bytes that would be nice to read</dd>
 <dt><span class="strong">Returns:</span></dt><dd>true if succeeded reading the extra bytes</dd>
 <dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if failed to read the necessary bytes</dd></dl>
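The contract described here, read everything in necessaryLen, opportunistically read up to extraLen more, and report whether the extra bytes arrived, can be sketched as below. This is a simplified illustration of the semantics, not the HBase implementation:

    import java.io.IOException;
    import java.io.InputStream;

    // Sketch only: mirrors the read-with-extra contract described above.
    final class ReadWithExtraSketch {
      static boolean readWithExtra(InputStream in, byte[] buf, int bufOffset,
          int necessaryLen, int extraLen) throws IOException {
        int read = 0;
        int wanted = necessaryLen + extraLen;
        while (read < wanted) {
          int n = in.read(buf, bufOffset + read, wanted - read);
          if (n < 0) {
            if (read < necessaryLen) {
              throw new IOException("Premature EOF: read " + read + " of " + necessaryLen + " bytes");
            }
            return false; // the necessary bytes were read, but not all of the extra bytes
          }
          read += n;
        }
        return true; // the extra bytes were read as well
      }
    }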
@@ -1315,14 +1350,15 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>positionalReadWithExtra</h4>
-<pre>static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.743">positionalReadWithExtra</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;in,
+<pre>static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.780">positionalReadWithExtra</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;in,
                               long&nbsp;position,
                               byte[]&nbsp;buf,
                               int&nbsp;bufOffset,
                               int&nbsp;necessaryLen,
                               int&nbsp;extraLen)
                                 throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">Read from an input stream. Analogous to
+<div class="block">Read from an input stream at least <code>necessaryLen</code> and if possible,
+ <code>extraLen</code> also if available. Analogous to
  <code>IOUtils.readFully(InputStream, byte[], int, int)</code>, but uses
  positional read and specifies a number of "extra" bytes that would be
  desirable but not absolutely necessary to read.</div>
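The positional variant reads at an explicit file offset rather than from the stream's current position, so it does not move the stream's seek pointer. A hedged sketch of that difference using FSDataInputStream's positioned read; again illustrative only, not the actual method body:

    import java.io.IOException;
    import org.apache.hadoop.fs.FSDataInputStream;

    // Sketch only: positional read of necessaryLen plus up to extraLen additional bytes.
    final class PositionalReadWithExtraSketch {
      static boolean positionalReadWithExtra(FSDataInputStream in, long position, byte[] buf,
          int bufOffset, int necessaryLen, int extraLen) throws IOException {
        int read = 0;
        int wanted = necessaryLen + extraLen;
        while (read < wanted) {
          // Positioned read: does not change the stream's current offset.
          int n = in.read(position + read, buf, bufOffset + read, wanted - read);
          if (n < 0) {
            if (read < necessaryLen) {
              throw new IOException("Premature EOF: read " + read + " of " + necessaryLen + " bytes");
            }
            return false; // necessary bytes read; extra bytes were not available
          }
          read += n;
        }
        return true; // both necessary and extra bytes were read
      }
    }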
@@ -1340,7 +1376,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getNextBlockOnDiskSizeWithHeader</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.768">getNextBlockOnDiskSizeWithHeader</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.805">getNextBlockOnDiskSizeWithHeader</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the on-disk size of the next block (including the header size)
          that was read by peeking into the next block's header</dd></dl>
 </li>
@@ -1351,7 +1387,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>getSerializedLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1764">getSerializedLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1823">getSerializedLength</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html#getSerializedLength()">Cacheable</a></code></strong></div>
 <div class="block">Returns the length of the ByteBuffer required to serialized the object. If the
  object cannot be serialized, it should return 0.</div>
@@ -1367,7 +1403,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable
 <ul class="blockList">
 <li class="blockList">
 <h4>serialize</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1774">serialize</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;destination)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock

<TRUNCATED>

[47/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
index 68b00ca..91572ee 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
@@ -45,631 +45,630 @@
 <span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.TableName;<a name="line.37"></a>
 <span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.client.Admin;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.client.HTable;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.client.RegionLocator;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.client.Result;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.client.Table;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.io.ImmutableBytesWritable;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.Addressing;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.RegionSizeCalculator;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.net.DNS;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.util.StringUtils;<a name="line.57"></a>
-<span class="sourceLineNo">058</span><a name="line.58"></a>
-<span class="sourceLineNo">059</span>/**<a name="line.59"></a>
-<span class="sourceLineNo">060</span> * A base for {@link TableInputFormat}s. Receives a {@link Connection}, a {@link TableName},<a name="line.60"></a>
-<span class="sourceLineNo">061</span> * an {@link Scan} instance that defines the input columns etc. Subclasses may use<a name="line.61"></a>
-<span class="sourceLineNo">062</span> * other TableRecordReader implementations.<a name="line.62"></a>
-<span class="sourceLineNo">063</span> *<a name="line.63"></a>
-<span class="sourceLineNo">064</span> * Subclasses MUST ensure initializeTable(Connection, TableName) is called for an instance to<a name="line.64"></a>
-<span class="sourceLineNo">065</span> * function properly. Each of the entry points to this class used by the MapReduce framework,<a name="line.65"></a>
-<span class="sourceLineNo">066</span> * {@link #createRecordReader(InputSplit, TaskAttemptContext)} and {@link #getSplits(JobContext)},<a name="line.66"></a>
-<span class="sourceLineNo">067</span> * will call {@link #initialize(JobContext)} as a convenient centralized location to handle<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * retrieving the necessary configuration information. If your subclass overrides either of these<a name="line.68"></a>
-<span class="sourceLineNo">069</span> * methods, either call the parent version or call initialize yourself.<a name="line.69"></a>
-<span class="sourceLineNo">070</span> *<a name="line.70"></a>
-<span class="sourceLineNo">071</span> * &lt;p&gt;<a name="line.71"></a>
-<span class="sourceLineNo">072</span> * An example of a subclass:<a name="line.72"></a>
-<span class="sourceLineNo">073</span> * &lt;pre&gt;<a name="line.73"></a>
-<span class="sourceLineNo">074</span> *   class ExampleTIF extends TableInputFormatBase {<a name="line.74"></a>
-<span class="sourceLineNo">075</span> *<a name="line.75"></a>
-<span class="sourceLineNo">076</span> *     {@literal @}Override<a name="line.76"></a>
-<span class="sourceLineNo">077</span> *     protected void initialize(JobContext context) throws IOException {<a name="line.77"></a>
-<span class="sourceLineNo">078</span> *       // We are responsible for the lifecycle of this connection until we hand it over in<a name="line.78"></a>
-<span class="sourceLineNo">079</span> *       // initializeTable.<a name="line.79"></a>
-<span class="sourceLineNo">080</span> *       Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create(<a name="line.80"></a>
-<span class="sourceLineNo">081</span> *              job.getConfiguration()));<a name="line.81"></a>
-<span class="sourceLineNo">082</span> *       TableName tableName = TableName.valueOf("exampleTable");<a name="line.82"></a>
-<span class="sourceLineNo">083</span> *       // mandatory. once passed here, TableInputFormatBase will handle closing the connection.<a name="line.83"></a>
-<span class="sourceLineNo">084</span> *       initializeTable(connection, tableName);<a name="line.84"></a>
-<span class="sourceLineNo">085</span> *       byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),<a name="line.85"></a>
-<span class="sourceLineNo">086</span> *         Bytes.toBytes("columnB") };<a name="line.86"></a>
-<span class="sourceLineNo">087</span> *       // optional, by default we'll get everything for the table.<a name="line.87"></a>
-<span class="sourceLineNo">088</span> *       Scan scan = new Scan();<a name="line.88"></a>
-<span class="sourceLineNo">089</span> *       for (byte[] family : inputColumns) {<a name="line.89"></a>
-<span class="sourceLineNo">090</span> *         scan.addFamily(family);<a name="line.90"></a>
-<span class="sourceLineNo">091</span> *       }<a name="line.91"></a>
-<span class="sourceLineNo">092</span> *       Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));<a name="line.92"></a>
-<span class="sourceLineNo">093</span> *       scan.setFilter(exampleFilter);<a name="line.93"></a>
-<span class="sourceLineNo">094</span> *       setScan(scan);<a name="line.94"></a>
-<span class="sourceLineNo">095</span> *     }<a name="line.95"></a>
-<span class="sourceLineNo">096</span> *   }<a name="line.96"></a>
-<span class="sourceLineNo">097</span> * &lt;/pre&gt;<a name="line.97"></a>
-<span class="sourceLineNo">098</span> */<a name="line.98"></a>
-<span class="sourceLineNo">099</span>@InterfaceAudience.Public<a name="line.99"></a>
-<span class="sourceLineNo">100</span>@InterfaceStability.Stable<a name="line.100"></a>
-<span class="sourceLineNo">101</span>public abstract class TableInputFormatBase<a name="line.101"></a>
-<span class="sourceLineNo">102</span>extends InputFormat&lt;ImmutableBytesWritable, Result&gt; {<a name="line.102"></a>
-<span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>  /** Specify if we enable auto-balance for input in M/R jobs.*/<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  public static final String MAPREDUCE_INPUT_AUTOBALANCE = "hbase.mapreduce.input.autobalance";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  /** Specify if ratio for data skew in M/R jobs, it goes well with the enabling hbase.mapreduce<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * .input.autobalance property.*/<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  public static final String INPUT_AUTOBALANCE_MAXSKEWRATIO = "hbase.mapreduce.input.autobalance" +<a name="line.108"></a>
-<span class="sourceLineNo">109</span>          ".maxskewratio";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  /** Specify if the row key in table is text (ASCII between 32~126),<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * default is true. False means the table is using binary row key*/<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  public static final String TABLE_ROW_TEXTKEY = "hbase.table.row.textkey";<a name="line.112"></a>
-<span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>  private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class);<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  private static final String NOT_INITIALIZED = "The input format instance has not been properly " +<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      "initialized. Ensure you call initializeTable either in your constructor or initialize " +<a name="line.117"></a>
-<span class="sourceLineNo">118</span>      "method";<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  private static final String INITIALIZATION_ERROR = "Cannot create a record reader because of a" +<a name="line.119"></a>
-<span class="sourceLineNo">120</span>            " previous error. Please look at the previous logs lines from" +<a name="line.120"></a>
-<span class="sourceLineNo">121</span>            " the task's full log for more details.";<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>  /** Holds the details for the internal scanner.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   *<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   * @see Scan */<a name="line.125"></a>
-<span class="sourceLineNo">126</span>  private Scan scan = null;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>  /** The {@link Admin}. */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  private Admin admin;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  /** The {@link Table} to scan. */<a name="line.129"></a>
-<span class="sourceLineNo">130</span>  private Table table;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>  /** The {@link RegionLocator} of the table. */<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  private RegionLocator regionLocator;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>  /** The reader scanning the table, can be a custom one. */<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  private TableRecordReader tableRecordReader = null;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  /** The underlying {@link Connection} of the table. */<a name="line.135"></a>
-<span class="sourceLineNo">136</span>  private Connection connection;<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>  <a name="line.138"></a>
-<span class="sourceLineNo">139</span>  /** The reverse DNS lookup cache mapping: IPAddress =&gt; HostName */<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  private HashMap&lt;InetAddress, String&gt; reverseDNSCacheMap =<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    new HashMap&lt;InetAddress, String&gt;();<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>  /**<a name="line.143"></a>
-<span class="sourceLineNo">144</span>   * Builds a {@link TableRecordReader}. If no {@link TableRecordReader} was provided, uses<a name="line.144"></a>
-<span class="sourceLineNo">145</span>   * the default.<a name="line.145"></a>
-<span class="sourceLineNo">146</span>   *<a name="line.146"></a>
-<span class="sourceLineNo">147</span>   * @param split  The split to work with.<a name="line.147"></a>
-<span class="sourceLineNo">148</span>   * @param context  The current context.<a name="line.148"></a>
-<span class="sourceLineNo">149</span>   * @return The newly created record reader.<a name="line.149"></a>
-<span class="sourceLineNo">150</span>   * @throws IOException When creating the reader fails.<a name="line.150"></a>
-<span class="sourceLineNo">151</span>   * @see org.apache.hadoop.mapreduce.InputFormat#createRecordReader(<a name="line.151"></a>
-<span class="sourceLineNo">152</span>   *   org.apache.hadoop.mapreduce.InputSplit,<a name="line.152"></a>
-<span class="sourceLineNo">153</span>   *   org.apache.hadoop.mapreduce.TaskAttemptContext)<a name="line.153"></a>
-<span class="sourceLineNo">154</span>   */<a name="line.154"></a>
-<span class="sourceLineNo">155</span>  @Override<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  public RecordReader&lt;ImmutableBytesWritable, Result&gt; createRecordReader(<a name="line.156"></a>
-<span class="sourceLineNo">157</span>      InputSplit split, TaskAttemptContext context)<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  throws IOException {<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    // Just in case a subclass is relying on JobConfigurable magic.<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    if (table == null) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>      initialize(context);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    }<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    // null check in case our child overrides getTable to not throw.<a name="line.163"></a>
-<span class="sourceLineNo">164</span>    try {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      if (getTable() == null) {<a name="line.165"></a>
-<span class="sourceLineNo">166</span>        // initialize() must not have been implemented in the subclass.<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        throw new IOException(INITIALIZATION_ERROR);<a name="line.167"></a>
-<span class="sourceLineNo">168</span>      }<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    } catch (IllegalStateException exception) {<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      throw new IOException(INITIALIZATION_ERROR, exception);<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    }<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    TableSplit tSplit = (TableSplit) split;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    LOG.info("Input split length: " + StringUtils.humanReadableInt(tSplit.getLength()) + " bytes.");<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    final TableRecordReader trr =<a name="line.174"></a>
-<span class="sourceLineNo">175</span>        this.tableRecordReader != null ? this.tableRecordReader : new TableRecordReader();<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    Scan sc = new Scan(this.scan);<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    sc.setStartRow(tSplit.getStartRow());<a name="line.177"></a>
-<span class="sourceLineNo">178</span>    sc.setStopRow(tSplit.getEndRow());<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    trr.setScan(sc);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    trr.setTable(getTable());<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    return new RecordReader&lt;ImmutableBytesWritable, Result&gt;() {<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>      @Override<a name="line.183"></a>
-<span class="sourceLineNo">184</span>      public void close() throws IOException {<a name="line.184"></a>
-<span class="sourceLineNo">185</span>        trr.close();<a name="line.185"></a>
-<span class="sourceLineNo">186</span>        closeTable();<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      }<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>      @Override<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      public ImmutableBytesWritable getCurrentKey() throws IOException, InterruptedException {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        return trr.getCurrentKey();<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      }<a name="line.192"></a>
-<span class="sourceLineNo">193</span><a name="line.193"></a>
-<span class="sourceLineNo">194</span>      @Override<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      public Result getCurrentValue() throws IOException, InterruptedException {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>        return trr.getCurrentValue();<a name="line.196"></a>
-<span class="sourceLineNo">197</span>      }<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>      @Override<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      public float getProgress() throws IOException, InterruptedException {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>        return trr.getProgress();<a name="line.201"></a>
-<span class="sourceLineNo">202</span>      }<a name="line.202"></a>
-<span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>      @Override<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      public void initialize(InputSplit inputsplit, TaskAttemptContext context) throws IOException,<a name="line.205"></a>
-<span class="sourceLineNo">206</span>          InterruptedException {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        trr.initialize(inputsplit, context);<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      }<a name="line.208"></a>
-<span class="sourceLineNo">209</span><a name="line.209"></a>
-<span class="sourceLineNo">210</span>      @Override<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      public boolean nextKeyValue() throws IOException, InterruptedException {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        return trr.nextKeyValue();<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    };<a name="line.214"></a>
-<span class="sourceLineNo">215</span>  }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  protected Pair&lt;byte[][],byte[][]&gt; getStartEndKeys() throws IOException {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    return getRegionLocator().getStartEndKeys();<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  }<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>  /**<a name="line.221"></a>
-<span class="sourceLineNo">222</span>   * Calculates the splits that will serve as input for the map tasks. The<a name="line.222"></a>
-<span class="sourceLineNo">223</span>   * number of splits matches the number of regions in a table.<a name="line.223"></a>
-<span class="sourceLineNo">224</span>   *<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * @param context  The current job context.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * @return The list of input splits.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   * @throws IOException When creating the list of splits fails.<a name="line.227"></a>
-<span class="sourceLineNo">228</span>   * @see org.apache.hadoop.mapreduce.InputFormat#getSplits(<a name="line.228"></a>
-<span class="sourceLineNo">229</span>   *   org.apache.hadoop.mapreduce.JobContext)<a name="line.229"></a>
-<span class="sourceLineNo">230</span>   */<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  @Override<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  public List&lt;InputSplit&gt; getSplits(JobContext context) throws IOException {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    boolean closeOnFinish = false;<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>    // Just in case a subclass is relying on JobConfigurable magic.<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    if (table == null) {<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      initialize(context);<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      closeOnFinish = true;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>    // null check in case our child overrides getTable to not throw.<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    try {<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      if (getTable() == null) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        // initialize() must not have been implemented in the subclass.<a name="line.244"></a>
-<span class="sourceLineNo">245</span>        throw new IOException(INITIALIZATION_ERROR);<a name="line.245"></a>
-<span class="sourceLineNo">246</span>      }<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    } catch (IllegalStateException exception) {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      throw new IOException(INITIALIZATION_ERROR, exception);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    }<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>    try {<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      RegionSizeCalculator sizeCalculator =<a name="line.252"></a>
-<span class="sourceLineNo">253</span>          new RegionSizeCalculator(getRegionLocator(), getAdmin());<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      <a name="line.254"></a>
-<span class="sourceLineNo">255</span>      TableName tableName = getTable().getName();<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  <a name="line.256"></a>
-<span class="sourceLineNo">257</span>      Pair&lt;byte[][], byte[][]&gt; keys = getStartEndKeys();<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      if (keys == null || keys.getFirst() == null ||<a name="line.258"></a>
-<span class="sourceLineNo">259</span>          keys.getFirst().length == 0) {<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        HRegionLocation regLoc =<a name="line.260"></a>
-<span class="sourceLineNo">261</span>            getRegionLocator().getRegionLocation(HConstants.EMPTY_BYTE_ARRAY, false);<a name="line.261"></a>
-<span class="sourceLineNo">262</span>        if (null == regLoc) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>          throw new IOException("Expecting at least one region.");<a name="line.263"></a>
-<span class="sourceLineNo">264</span>        }<a name="line.264"></a>
-<span class="sourceLineNo">265</span>        List&lt;InputSplit&gt; splits = new ArrayList&lt;InputSplit&gt;(1);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        long regionSize = sizeCalculator.getRegionSize(regLoc.getRegionInfo().getRegionName());<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        TableSplit split = new TableSplit(tableName, scan,<a name="line.267"></a>
-<span class="sourceLineNo">268</span>            HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, regLoc<a name="line.268"></a>
-<span class="sourceLineNo">269</span>                .getHostnamePort().split(Addressing.HOSTNAME_PORT_SEPARATOR)[0], regionSize);<a name="line.269"></a>
-<span class="sourceLineNo">270</span>        splits.add(split);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        return splits;<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      List&lt;InputSplit&gt; splits = new ArrayList&lt;InputSplit&gt;(keys.getFirst().length);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      for (int i = 0; i &lt; keys.getFirst().length; i++) {<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        if (!includeRegionInSplit(keys.getFirst()[i], keys.getSecond()[i])) {<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          continue;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>        }<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        HRegionLocation location = getRegionLocator().getRegionLocation(keys.getFirst()[i], false);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>        // The below InetSocketAddress creation does a name resolution.<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        InetSocketAddress isa = new InetSocketAddress(location.getHostname(), location.getPort());<a name="line.280"></a>
-<span class="sourceLineNo">281</span>        if (isa.isUnresolved()) {<a name="line.281"></a>
-<span class="sourceLineNo">282</span>          LOG.warn("Failed resolve " + isa);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>        }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        InetAddress regionAddress = isa.getAddress();<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        String regionLocation;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        regionLocation = reverseDNS(regionAddress);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  <a name="line.287"></a>
-<span class="sourceLineNo">288</span>        byte[] startRow = scan.getStartRow();<a name="line.288"></a>
-<span class="sourceLineNo">289</span>        byte[] stopRow = scan.getStopRow();<a name="line.289"></a>
-<span class="sourceLineNo">290</span>        // determine if the given start an stop key fall into the region<a name="line.290"></a>
-<span class="sourceLineNo">291</span>        if ((startRow.length == 0 || keys.getSecond()[i].length == 0 ||<a name="line.291"></a>
-<span class="sourceLineNo">292</span>            Bytes.compareTo(startRow, keys.getSecond()[i]) &lt; 0) &amp;&amp;<a name="line.292"></a>
-<span class="sourceLineNo">293</span>            (stopRow.length == 0 ||<a name="line.293"></a>
-<span class="sourceLineNo">294</span>             Bytes.compareTo(stopRow, keys.getFirst()[i]) &gt; 0)) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>          byte[] splitStart = startRow.length == 0 ||<a name="line.295"></a>
-<span class="sourceLineNo">296</span>            Bytes.compareTo(keys.getFirst()[i], startRow) &gt;= 0 ?<a name="line.296"></a>
-<span class="sourceLineNo">297</span>              keys.getFirst()[i] : startRow;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>          byte[] splitStop = (stopRow.length == 0 ||<a name="line.298"></a>
-<span class="sourceLineNo">299</span>            Bytes.compareTo(keys.getSecond()[i], stopRow) &lt;= 0) &amp;&amp;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>            keys.getSecond()[i].length &gt; 0 ?<a name="line.300"></a>
-<span class="sourceLineNo">301</span>              keys.getSecond()[i] : stopRow;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  <a name="line.302"></a>
-<span class="sourceLineNo">303</span>          byte[] regionName = location.getRegionInfo().getRegionName();<a name="line.303"></a>
-<span class="sourceLineNo">304</span>          long regionSize = sizeCalculator.getRegionSize(regionName);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>          TableSplit split = new TableSplit(tableName, scan,<a name="line.305"></a>
-<span class="sourceLineNo">306</span>            splitStart, splitStop, regionLocation, regionSize);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>          splits.add(split);<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          if (LOG.isDebugEnabled()) {<a name="line.308"></a>
-<span class="sourceLineNo">309</span>            LOG.debug("getSplits: split -&gt; " + i + " -&gt; " + split);<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          }<a name="line.310"></a>
-<span class="sourceLineNo">311</span>        }<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      //The default value of "hbase.mapreduce.input.autobalance" is false, which means not enabled.<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      boolean enableAutoBalance = context.getConfiguration()<a name="line.314"></a>
-<span class="sourceLineNo">315</span>        .getBoolean(MAPREDUCE_INPUT_AUTOBALANCE, false);<a name="line.315"></a>
-<span class="sourceLineNo">316</span>      if (enableAutoBalance) {<a name="line.316"></a>
-<span class="sourceLineNo">317</span>        long totalRegionSize=0;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        for (int i = 0; i &lt; splits.size(); i++){<a name="line.318"></a>
-<span class="sourceLineNo">319</span>          TableSplit ts = (TableSplit)splits.get(i);<a name="line.319"></a>
-<span class="sourceLineNo">320</span>          totalRegionSize += ts.getLength();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        }<a name="line.321"></a>
-<span class="sourceLineNo">322</span>        long averageRegionSize = totalRegionSize / splits.size();<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        // the averageRegionSize must be positive.<a name="line.323"></a>
-<span class="sourceLineNo">324</span>        if (averageRegionSize &lt;= 0) {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>            LOG.warn("The averageRegionSize is not positive: "+ averageRegionSize + ", " +<a name="line.325"></a>
-<span class="sourceLineNo">326</span>                    "set it to 1.");<a name="line.326"></a>
-<span class="sourceLineNo">327</span>            averageRegionSize = 1;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        }<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        return calculateRebalancedSplits(splits, context, averageRegionSize);<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      } else {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        return splits;<a name="line.331"></a>
-<span class="sourceLineNo">332</span>      }<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    } finally {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>      if (closeOnFinish) {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>        closeTable();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>      }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    }<a name="line.337"></a>
-<span class="sourceLineNo">338</span>  }<a name="line.338"></a>
-<span class="sourceLineNo">339</span><a name="line.339"></a>
-<span class="sourceLineNo">340</span>  String reverseDNS(InetAddress ipAddress) throws UnknownHostException {<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    String hostName = this.reverseDNSCacheMap.get(ipAddress);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    if (hostName == null) {<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      String ipAddressString = null;<a name="line.343"></a>
-<span class="sourceLineNo">344</span>      try {<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        ipAddressString = DNS.reverseDns(ipAddress, null);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      } catch (Exception e) {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        // We can use InetAddress in case the jndi failed to pull up the reverse DNS entry from the<a name="line.347"></a>
-<span class="sourceLineNo">348</span>        // name service. Also, in case of ipv6, we need to use the InetAddress since resolving<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        // reverse DNS using jndi doesn't work well with ipv6 addresses.<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        ipAddressString = InetAddress.getByName(ipAddress.getHostAddress()).getHostName();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>      }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      if (ipAddressString == null) throw new UnknownHostException("No host found for " + ipAddress);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      hostName = Strings.domainNamePointerToHostName(ipAddressString);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      this.reverseDNSCacheMap.put(ipAddress, hostName);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    return hostName;<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  /**<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * Calculates the number of MapReduce input splits for the map tasks. The number of<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * MapReduce input splits depends on the average region size and the "data skew ratio" user set in<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * configuration.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   *<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @param list  The list of input splits before balance.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @param context  The current job context.<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   * @param average  The average size of all regions .<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * @return The list of input splits.<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   * @throws IOException When creating the list of splits fails.<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @see org.apache.hadoop.mapreduce.InputFormat#getSplits(<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *   org.apache.hadoop.mapreduce.JobContext)<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   */<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  private List&lt;InputSplit&gt; calculateRebalancedSplits(List&lt;InputSplit&gt; list, JobContext context,<a name="line.372"></a>
-<span class="sourceLineNo">373</span>                                               long average) throws IOException {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    List&lt;InputSplit&gt; resultList = new ArrayList&lt;InputSplit&gt;();<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    Configuration conf = context.getConfiguration();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    //The default data skew ratio is 3<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    long dataSkewRatio = conf.getLong(INPUT_AUTOBALANCE_MAXSKEWRATIO, 3);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    //It determines which mode to use: text key mode or binary key mode. The default is text mode.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    boolean isTextKey = context.getConfiguration().getBoolean(TABLE_ROW_TEXTKEY, true);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    long dataSkewThreshold = dataSkewRatio * average;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    int count = 0;<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    while (count &lt; list.size()) {<a name="line.382"></a>
-<span class="sourceLineNo">383</span>      TableSplit ts = (TableSplit)list.get(count);<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      TableName tableName = ts.getTable();<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      String regionLocation = ts.getRegionLocation();<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      long regionSize = ts.getLength();<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      if (regionSize &gt;= dataSkewThreshold) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>        // if the current region size is large than the data skew threshold,<a name="line.388"></a>
-<span class="sourceLineNo">389</span>        // split the region into two MapReduce input splits.<a name="line.389"></a>
-<span class="sourceLineNo">390</span>        byte[] splitKey = getSplitKey(ts.getStartRow(), ts.getEndRow(), isTextKey);<a name="line.390"></a>
-<span class="sourceLineNo">391</span>         //Set the size of child TableSplit as 1/2 of the region size. The exact size of the<a name="line.391"></a>
-<span class="sourceLineNo">392</span>         // MapReduce input splits is not far off.<a name="line.392"></a>
-<span class="sourceLineNo">393</span>        TableSplit t1 = new TableSplit(tableName, scan, ts.getStartRow(), splitKey, regionLocation,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>                regionSize / 2);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        TableSplit t2 = new TableSplit(tableName, scan, splitKey, ts.getEndRow(), regionLocation,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>                regionSize - regionSize / 2);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        resultList.add(t1);<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        resultList.add(t2);<a name="line.398"></a>
-<span class="sourceLineNo">399</span>        count++;<a name="line.399"></a>
-<span class="sourceLineNo">400</span>      } else if (regionSize &gt;= average) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>        // if the region size between average size and data skew threshold size,<a name="line.401"></a>
-<span class="sourceLineNo">402</span>        // make this region as one MapReduce input split.<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        resultList.add(ts);<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        count++;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      } else {<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        // if the total size of several small continuous regions less than the average region size,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>        // combine them into one MapReduce input split.<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        long totalSize = regionSize;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        byte[] splitStartKey = ts.getStartRow();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>        byte[] splitEndKey = ts.getEndRow();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>        count++;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        for (; count &lt; list.size(); count++) {<a name="line.412"></a>
-<span class="sourceLineNo">413</span>          TableSplit nextRegion = (TableSplit)list.get(count);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>          long nextRegionSize = nextRegion.getLength();<a name="line.414"></a>
-<span class="sourceLineNo">415</span>          if (totalSize + nextRegionSize &lt;= dataSkewThreshold) {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            totalSize = totalSize + nextRegionSize;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>            splitEndKey = nextRegion.getEndRow();<a name="line.417"></a>
-<span class="sourceLineNo">418</span>          } else {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>            break;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>          }<a name="line.420"></a>
-<span class="sourceLineNo">421</span>        }<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        TableSplit t = new TableSplit(tableName, scan, splitStartKey, splitEndKey,<a name="line.422"></a>
-<span class="sourceLineNo">423</span>                regionLocation, totalSize);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>        resultList.add(t);<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      }<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    return resultList;<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * select a split point in the region. The selection of the split point is based on an uniform<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * distribution assumption for the keys in a region.<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * Here are some examples:<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   * startKey: aaabcdefg  endKey: aaafff    split point: aaad<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * startKey: 111000  endKey: 1125790    split point: 111b<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   * startKey: 1110  endKey: 1120    split point: 111_<a name="line.436"></a>
-<span class="sourceLineNo">437</span>   * startKey: binary key { 13, -19, 126, 127 }, endKey: binary key { 13, -19, 127, 0 },<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   * split point: binary key { 13, -19, 127, -64 }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>   * Set this function as "public static", make it easier for test.<a name="line.439"></a>
-<span class="sourceLineNo">440</span>   *<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   * @param start Start key of the region<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * @param end End key of the region<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   * @param isText It determines to use text key mode or binary key mode<a name="line.443"></a>
-<span class="sourceLineNo">444</span>   * @return The split point in the region.<a name="line.444"></a>
-<span class="sourceLineNo">445</span>   */<a name="line.445"></a>
-<span class="sourceLineNo">446</span>  @InterfaceAudience.Private<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  public static byte[] getSplitKey(byte[] start, byte[] end, boolean isText) {<a name="line.447"></a>
-<span class="sourceLineNo">448</span>    byte upperLimitByte;<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    byte lowerLimitByte;<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    //Use text mode or binary mode.<a name="line.450"></a>
-<span class="sourceLineNo">451</span>    if (isText) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      //The range of text char set in ASCII is [32,126], the lower limit is space and the upper<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      // limit is '~'.<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      upperLimitByte = '~';<a name="line.454"></a>
-<span class="sourceLineNo">455</span>      lowerLimitByte = ' ';<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    } else {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      upperLimitByte = Byte.MAX_VALUE;<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      lowerLimitByte = Byte.MIN_VALUE;<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span>    // For special case<a name="line.460"></a>
-<span class="sourceLineNo">461</span>    // Example 1 : startkey=null, endkey="hhhqqqwww", splitKey="h"<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    // Example 2 (text key mode): startKey="ffffaaa", endKey=null, splitkey="f~~~~~~"<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    if (start.length == 0 &amp;&amp; end.length == 0){<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      return new byte[]{(byte) ((lowerLimitByte + upperLimitByte) / 2)};<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    }<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    if (start.length == 0 &amp;&amp; end.length != 0){<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      return new byte[]{ end[0] };<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>    if (start.length != 0 &amp;&amp; end.length == 0){<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      byte[] result =new byte[start.length];<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      result[0]=start[0];<a name="line.471"></a>
-<span class="sourceLineNo">472</span>      for (int k = 1; k &lt; start.length; k++){<a name="line.472"></a>
-<span class="sourceLineNo">473</span>          result[k] = upperLimitByte;<a name="line.473"></a>
-<span class="sourceLineNo">474</span>      }<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      return result;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    }<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    // A list to store bytes in split key<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    List resultBytesList = new ArrayList();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    int maxLength = start.length &gt; end.length ? start.length : end.length;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    for (int i = 0; i &lt; maxLength; i++) {<a name="line.480"></a>
-<span class="sourceLineNo">481</span>      //calculate the midpoint byte between the first difference<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      //for example: "11ae" and "11chw", the midpoint is "11b"<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      //another example: "11ae" and "11bhw", the first different byte is 'a' and 'b',<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      // there is no midpoint between 'a' and 'b', so we need to check the next byte.<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      if (start[i] == end[i]) {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        resultBytesList.add(start[i]);<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        //For special case like: startKey="aaa", endKey="aaaz", splitKey="aaaM"<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        if (i + 1 == start.length) {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>          resultBytesList.add((byte) ((lowerLimitByte + end[i + 1]) / 2));<a name="line.489"></a>
-<span class="sourceLineNo">490</span>          break;<a name="line.490"></a>
-<span class="sourceLineNo">491</span>        }<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      } else {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        //if the two bytes differ by 1, like ['a','b'], We need to check the next byte to find<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        // the midpoint.<a name="line.494"></a>
-<span class="sourceLineNo">495</span>        if ((int)end[i] - (int)start[i] == 1) {<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          //get next byte after the first difference<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          byte startNextByte = (i + 1 &lt; start.length) ? start[i + 1] : lowerLimitByte;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          byte endNextByte = (i + 1 &lt; end.length) ? end[i + 1] : lowerLimitByte;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          int byteRange = (upperLimitByte - startNextByte) + (endNextByte - lowerLimitByte) + 1;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          int halfRange = byteRange / 2;<a name="line.500"></a>
-<span class="sourceLineNo">501</span>          if ((int)startNextByte + halfRange &gt; (int)upperLimitByte) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>            resultBytesList.add(end[i]);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>            resultBytesList.add((byte) (startNextByte + halfRange - upperLimitByte +<a name="line.503"></a>
-<span class="sourceLineNo">504</span>                    lowerLimitByte));<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          } else {<a name="line.505"></a>
-<span class="sourceLineNo">506</span>            resultBytesList.add(start[i]);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>            resultBytesList.add((byte) (startNextByte + halfRange));<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          }<a name="line.508"></a>
-<span class="sourceLineNo">509</span>        } else {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          //calculate the midpoint key by the fist different byte (normal case),<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          // like "11ae" and "11chw", the midpoint is "11b"<a name="line.511"></a>
-<span class="sourceLineNo">512</span>          resultBytesList.add((byte) ((start[i] + end[i]) / 2));<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        }<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        break;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    //transform the List of bytes to byte[]<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    byte[] result = new byte[resultBytesList.size()];<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    for (int k = 0; k &lt; resultBytesList.size(); k++) {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      result[k] = (byte) resultBytesList.get(k);<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    }<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    return result;<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  }<a name="line.523"></a>
-<span class="sourceLineNo">524</span><a name="line.524"></a>
-<span class="sourceLineNo">525</span>  /**<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   * Test if the given region is to be included in the InputSplit while splitting<a name="line.526"></a>
-<span class="sourceLineNo">527</span>   * the regions of a table.<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * &lt;p&gt;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * This optimization is effective when there is a specific reasoning to exclude an entire region from the M-R job,<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   * (and hence, not contributing to the InputSplit), given the start and end keys of the same. &lt;br&gt;<a name="line.530"></a>
-<span class="sourceLineNo">531</span>   * Useful when we need to remember the last-processed top record and revisit the [last, current) interval for M-R processing,<a name="line.531"></a>
-<span class="sourceLineNo">532</span>   * continuously. In addition to reducing InputSplits, reduces the load on the region server as well, due to the ordering of the keys.<a name="line.532"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.client.RegionLocator;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.client.Result;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.client.Table;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.io.ImmutableBytesWritable;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.Addressing;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.RegionSizeCalculator;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.net.DNS;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.util.StringUtils;<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>/**<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * A base for {@link TableInputFormat}s. Receives a {@link Connection}, a {@link TableName},<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * an {@link Scan} instance that defines the input columns etc. Subclasses may use<a name="line.60"></a>
+<span class="sourceLineNo">061</span> * other TableRecordReader implementations.<a name="line.61"></a>
+<span class="sourceLineNo">062</span> *<a name="line.62"></a>
+<span class="sourceLineNo">063</span> * Subclasses MUST ensure initializeTable(Connection, TableName) is called for an instance to<a name="line.63"></a>
+<span class="sourceLineNo">064</span> * function properly. Each of the entry points to this class used by the MapReduce framework,<a name="line.64"></a>
+<span class="sourceLineNo">065</span> * {@link #createRecordReader(InputSplit, TaskAttemptContext)} and {@link #getSplits(JobContext)},<a name="line.65"></a>
+<span class="sourceLineNo">066</span> * will call {@link #initialize(JobContext)} as a convenient centralized location to handle<a name="line.66"></a>
+<span class="sourceLineNo">067</span> * retrieving the necessary configuration information. If your subclass overrides either of these<a name="line.67"></a>
+<span class="sourceLineNo">068</span> * methods, either call the parent version or call initialize yourself.<a name="line.68"></a>
+<span class="sourceLineNo">069</span> *<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * &lt;p&gt;<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * An example of a subclass:<a name="line.71"></a>
+<span class="sourceLineNo">072</span> * &lt;pre&gt;<a name="line.72"></a>
+<span class="sourceLineNo">073</span> *   class ExampleTIF extends TableInputFormatBase {<a name="line.73"></a>
+<span class="sourceLineNo">074</span> *<a name="line.74"></a>
+<span class="sourceLineNo">075</span> *     {@literal @}Override<a name="line.75"></a>
+<span class="sourceLineNo">076</span> *     protected void initialize(JobContext context) throws IOException {<a name="line.76"></a>
+<span class="sourceLineNo">077</span> *       // We are responsible for the lifecycle of this connection until we hand it over in<a name="line.77"></a>
+<span class="sourceLineNo">078</span> *       // initializeTable.<a name="line.78"></a>
+<span class="sourceLineNo">079</span> *       Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create(<a name="line.79"></a>
+<span class="sourceLineNo">080</span> *              job.getConfiguration()));<a name="line.80"></a>
+<span class="sourceLineNo">081</span> *       TableName tableName = TableName.valueOf("exampleTable");<a name="line.81"></a>
+<span class="sourceLineNo">082</span> *       // mandatory. once passed here, TableInputFormatBase will handle closing the connection.<a name="line.82"></a>
+<span class="sourceLineNo">083</span> *       initializeTable(connection, tableName);<a name="line.83"></a>
+<span class="sourceLineNo">084</span> *       byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),<a name="line.84"></a>
+<span class="sourceLineNo">085</span> *         Bytes.toBytes("columnB") };<a name="line.85"></a>
+<span class="sourceLineNo">086</span> *       // optional, by default we'll get everything for the table.<a name="line.86"></a>
+<span class="sourceLineNo">087</span> *       Scan scan = new Scan();<a name="line.87"></a>
+<span class="sourceLineNo">088</span> *       for (byte[] family : inputColumns) {<a name="line.88"></a>
+<span class="sourceLineNo">089</span> *         scan.addFamily(family);<a name="line.89"></a>
+<span class="sourceLineNo">090</span> *       }<a name="line.90"></a>
+<span class="sourceLineNo">091</span> *       Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));<a name="line.91"></a>
+<span class="sourceLineNo">092</span> *       scan.setFilter(exampleFilter);<a name="line.92"></a>
+<span class="sourceLineNo">093</span> *       setScan(scan);<a name="line.93"></a>
+<span class="sourceLineNo">094</span> *     }<a name="line.94"></a>
+<span class="sourceLineNo">095</span> *   }<a name="line.95"></a>
+<span class="sourceLineNo">096</span> * &lt;/pre&gt;<a name="line.96"></a>
+<span class="sourceLineNo">097</span> */<a name="line.97"></a>
+<span class="sourceLineNo">098</span>@InterfaceAudience.Public<a name="line.98"></a>
+<span class="sourceLineNo">099</span>@InterfaceStability.Stable<a name="line.99"></a>
+<span class="sourceLineNo">100</span>public abstract class TableInputFormatBase<a name="line.100"></a>
+<span class="sourceLineNo">101</span>extends InputFormat&lt;ImmutableBytesWritable, Result&gt; {<a name="line.101"></a>
+<span class="sourceLineNo">102</span><a name="line.102"></a>
+<span class="sourceLineNo">103</span>  /** Specify if we enable auto-balance for input in M/R jobs.*/<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  public static final String MAPREDUCE_INPUT_AUTOBALANCE = "hbase.mapreduce.input.autobalance";<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  /** Specify if ratio for data skew in M/R jobs, it goes well with the enabling hbase.mapreduce<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   * .input.autobalance property.*/<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  public static final String INPUT_AUTOBALANCE_MAXSKEWRATIO = "hbase.mapreduce.input.autobalance" +<a name="line.107"></a>
+<span class="sourceLineNo">108</span>          ".maxskewratio";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  /** Specify if the row key in table is text (ASCII between 32~126),<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * default is true. False means the table is using binary row key*/<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  public static final String TABLE_ROW_TEXTKEY = "hbase.table.row.textkey";<a name="line.111"></a>
+<span class="sourceLineNo">112</span><a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class);<a name="line.113"></a>
+<span class="sourceLineNo">114</span><a name="line.114"></a>
+<span class="sourceLineNo">115</span>  private static final String NOT_INITIALIZED = "The input format instance has not been properly " +<a name="line.115"></a>
+<span class="sourceLineNo">116</span>      "initialized. Ensure you call initializeTable either in your constructor or initialize " +<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      "method";<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private static final String INITIALIZATION_ERROR = "Cannot create a record reader because of a" +<a name="line.118"></a>
+<span class="sourceLineNo">119</span>            " previous error. Please look at the previous logs lines from" +<a name="line.119"></a>
+<span class="sourceLineNo">120</span>            " the task's full log for more details.";<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  /** Holds the details for the internal scanner.<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   *<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   * @see Scan */<a name="line.124"></a>
+<span class="sourceLineNo">125</span>  private Scan scan = null;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  /** The {@link Admin}. */<a name="line.126"></a>
+<span class="sourceLineNo">127</span>  private Admin admin;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>  /** The {@link Table} to scan. */<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  private Table table;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  /** The {@link RegionLocator} of the table. */<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  private RegionLocator regionLocator;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /** The reader scanning the table, can be a custom one. */<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  private TableRecordReader tableRecordReader = null;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  /** The underlying {@link Connection} of the table. */<a name="line.134"></a>
+<span class="sourceLineNo">135</span>  private Connection connection;<a name="line.135"></a>
+<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">137</span>  <a name="line.137"></a>
+<span class="sourceLineNo">138</span>  /** The reverse DNS lookup cache mapping: IPAddress =&gt; HostName */<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  private HashMap&lt;InetAddress, String&gt; reverseDNSCacheMap =<a name="line.139"></a>
+<span class="sourceLineNo">140</span>    new HashMap&lt;InetAddress, String&gt;();<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>  /**<a name="line.142"></a>
+<span class="sourceLineNo">143</span>   * Builds a {@link TableRecordReader}. If no {@link TableRecordReader} was provided, uses<a name="line.143"></a>
+<span class="sourceLineNo">144</span>   * the default.<a name="line.144"></a>
+<span class="sourceLineNo">145</span>   *<a name="line.145"></a>
+<span class="sourceLineNo">146</span>   * @param split  The split to work with.<a name="line.146"></a>
+<span class="sourceLineNo">147</span>   * @param context  The current context.<a name="line.147"></a>
+<span class="sourceLineNo">148</span>   * @return The newly created record reader.<a name="line.148"></a>
+<span class="sourceLineNo">149</span>   * @throws IOException When creating the reader fails.<a name="line.149"></a>
+<span class="sourceLineNo">150</span>   * @see org.apache.hadoop.mapreduce.InputFormat#createRecordReader(<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   *   org.apache.hadoop.mapreduce.InputSplit,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   *   org.apache.hadoop.mapreduce.TaskAttemptContext)<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   */<a name="line.153"></a>
+<span class="sourceLineNo">154</span>  @Override<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  public RecordReader&lt;ImmutableBytesWritable, Result&gt; createRecordReader(<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      InputSplit split, TaskAttemptContext context)<a name="line.156"></a>
+<span class="sourceLineNo">157</span>  throws IOException {<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    // Just in case a subclass is relying on JobConfigurable magic.<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    if (table == null) {<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      initialize(context);<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    }<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    // null check in case our child overrides getTable to not throw.<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    try {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      if (getTable() == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>        // initialize() must not have been implemented in the subclass.<a name="line.165"></a>
+<span class="sourceLineNo">166</span>        throw new IOException(INITIALIZATION_ERROR);<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      }<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    } catch (IllegalStateException exception) {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      throw new IOException(INITIALIZATION_ERROR, exception);<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    }<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    TableSplit tSplit = (TableSplit) split;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    LOG.info("Input split length: " + StringUtils.humanReadableInt(tSplit.getLength()) + " bytes.");<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    final TableRecordReader trr =<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        this.tableRecordReader != null ? this.tableRecordReader : new TableRecordReader();<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    Scan sc = new Scan(this.scan);<a name="line.175"></a>
+<span class="sourceLineNo">176</span>    sc.setStartRow(tSplit.getStartRow());<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    sc.setStopRow(tSplit.getEndRow());<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    trr.setScan(sc);<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    trr.setTable(getTable());<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    return new RecordReader&lt;ImmutableBytesWritable, Result&gt;() {<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>      @Override<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      public void close() throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        trr.close();<a name="line.184"></a>
+<span class="sourceLineNo">185</span>        closeTable();<a name="line.185"></a>
+<span class="sourceLineNo">186</span>      }<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>      @Override<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      public ImmutableBytesWritable getCurrentKey() throws IOException, InterruptedException {<a name="line.189"></a>
+<span class="sourceLineNo">190</span>        return trr.getCurrentKey();<a name="line.190"></a>
+<span class="sourceLineNo">191</span>      }<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>      @Override<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      public Result getCurrentValue() throws IOException, InterruptedException {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>        return trr.getCurrentValue();<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      }<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>      @Override<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      public float getProgress() throws IOException, InterruptedException {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>        return trr.getProgress();<a name="line.200"></a>
+<span class="sourceLineNo">201</span>      }<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>      @Override<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      public void initialize(InputSplit inputsplit, TaskAttemptContext context) throws IOException,<a name="line.204"></a>
+<span class="sourceLineNo">205</span>          InterruptedException {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>        trr.initialize(inputsplit, context);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      }<a name="line.207"></a>
+<span class="sourceLineNo">208</span><a name="line.208"></a>
+<span class="sourceLineNo">209</span>      @Override<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      public boolean nextKeyValue() throws IOException, InterruptedException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>        return trr.nextKeyValue();<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      }<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    };<a name="line.213"></a>
+<span class="sourceLineNo">214</span>  }<a name="line.214"></a>
+<span class="sourceLineNo">215</span><a name="line.215"></a>
+<span class="sourceLineNo">216</span>  protected Pair&lt;byte[][],byte[][]&gt; getStartEndKeys() throws IOException {<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    return getRegionLocator().getStartEndKeys();<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  }<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>  /**<a name="line.220"></a>
+<span class="sourceLineNo">221</span>   * Calculates the splits that will serve as input for the map tasks. The<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   * number of splits matches the number of regions in a table.<a name="line.222"></a>
+<span class="sourceLineNo">223</span>   *<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * @param context  The current job context.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * @return The list of input splits.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   * @throws IOException When creating the list of splits fails.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>   * @see org.apache.hadoop.mapreduce.InputFormat#getSplits(<a name="line.227"></a>
+<span class="sourceLineNo">228</span>   *   org.apache.hadoop.mapreduce.JobContext)<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  @Override<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  public List&lt;InputSplit&gt; getSplits(JobContext context) throws IOException {<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    boolean closeOnFinish = false;<a name="line.232"></a>
+<span class="sourceLineNo">233</span><a name="line.233"></a>
+<span class="sourceLineNo">234</span>    // Just in case a subclass is relying on JobConfigurable magic.<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    if (table == null) {<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      initialize(context);<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      closeOnFinish = true;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    }<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>    // null check in case our child overrides getTable to not throw.<a name="line.240"></a>
+<span class="sourceLineNo">241</span>    try {<a name="line.241"></a>
+<span class="sourceLineNo">242</span>      if (getTable() == null) {<a name="line.242"></a>
+<span class="sourceLineNo">243</span>        // initialize() must not have been implemented in the subclass.<a name="line.243"></a>
+<span class="sourceLineNo">244</span>        throw new IOException(INITIALIZATION_ERROR);<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      }<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    } catch (IllegalStateException exception) {<a name="line.246"></a>
+<span class="sourceLineNo">247</span>      throw new IOException(INITIALIZATION_ERROR, exception);<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    }<a name="line.248"></a>
+<span class="sourceLineNo">249</span><a name="line.249"></a>
+<span class="sourceLineNo">250</span>    try {<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      RegionSizeCalculator sizeCalculator =<a name="line.251"></a>
+<span class="sourceLineNo">252</span>          new RegionSizeCalculator(getRegionLocator(), getAdmin());<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      <a name="line.253"></a>
+<span class="sourceLineNo">254</span>      TableName tableName = getTable().getName();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  <a name="line.255"></a>
+<span class="sourceLineNo">256</span>      Pair&lt;byte[][], byte[][]&gt; keys = getStartEndKeys();<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      if (keys == null || keys.getFirst() == null ||<a name="line.257"></a>
+<span class="sourceLineNo">258</span>          keys.getFirst().length == 0) {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        HRegionLocation regLoc =<a name="line.259"></a>
+<span class="sourceLineNo">260</span>            getRegionLocator().getRegionLocation(HConstants.EMPTY_BYTE_ARRAY, false);<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        if (null == regLoc) {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          throw new IOException("Expecting at least one region.");<a name="line.262"></a>
+<span class="sourceLineNo">263</span>        }<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        List&lt;InputSplit&gt; splits = new ArrayList&lt;InputSplit&gt;(1);<a name="line.264"></a>
+<span class="sourceLineNo">265</span>        long regionSize = sizeCalculator.getRegionSize(regLoc.getRegionInfo().getRegionName());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>        TableSplit split = new TableSplit(tableName, scan,<a name="line.266"></a>
+<span class="sourceLineNo">267</span>            HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, regLoc<a name="line.267"></a>
+<span class="sourceLineNo">268</span>                .getHostnamePort().split(Addressing.HOSTNAME_PORT_SEPARATOR)[0], regionSize);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>        splits.add(split);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>        return splits;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>      }<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      List&lt;InputSplit&gt; splits = new ArrayList&lt;InputSplit&gt;(keys.getFirst().length);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      for (int i = 0; i &lt; keys.getFirst().length; i++) {<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        if (!includeRegionInSplit(keys.getFirst()[i], keys.getSecond()[i])) {<a name="line.274"></a>
+<span class="sourceLineNo">275</span>          continue;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>        }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>        HRegionLocation location = getRegionLocator().getRegionLocation(keys.getFirst()[i], false);<a name="line.277"></a>
+<span class="sourceLineNo">278</span>        // The below InetSocketAddress creation does a name resolution.<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        InetSocketAddress isa = new InetSocketAddress(location.getHostname(), location.getPort());<a name="line.279"></a>
+<span class="sourceLineNo">280</span>        if (isa.isUnresolved()) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>          LOG.warn("Failed resolve " + isa);<a name="line.281"></a>
+<span class="sourceLineNo">282</span>        }<a name="line.282"></a>
+<span class="sourceLineNo">283</span>        InetAddress regionAddress = isa.getAddress();<a name="line.283"></a>
+<span class="sourceLineNo">284</span>        String regionLocation;<a name="line.284"></a>
+<span class="sourceLineNo">285</span>        regionLocation = reverseDNS(regionAddress);<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  <a name="line.286"></a>
+<span class="sourceLineNo">287</span>        byte[] startRow = scan.getStartRow();<a name="line.287"></a>
+<span class="sourceLineNo">288</span>        byte[] stopRow = scan.getStopRow();<a name="line.288"></a>
+<span class="sourceLineNo">289</span>        // determine if the given start an stop key fall into the region<a name="line.289"></a>
+<span class="sourceLineNo">290</span>        if ((startRow.length == 0 || keys.getSecond()[i].length == 0 ||<a name="line.290"></a>
+<span class="sourceLineNo">291</span>            Bytes.compareTo(startRow, keys.getSecond()[i]) &lt; 0) &amp;&amp;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>            (stopRow.length == 0 ||<a name="line.292"></a>
+<span class="sourceLineNo">293</span>             Bytes.compareTo(stopRow, keys.getFirst()[i]) &gt; 0)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>          byte[] splitStart = startRow.length == 0 ||<a name="line.294"></a>
+<span class="sourceLineNo">295</span>            Bytes.compareTo(keys.getFirst()[i], startRow) &gt;= 0 ?<a name="line.295"></a>
+<span class="sourceLineNo">296</span>              keys.getFirst()[i] : startRow;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          byte[] splitStop = (stopRow.length == 0 ||<a name="line.297"></a>
+<span class="sourceLineNo">298</span>            Bytes.compareTo(keys.getSecond()[i], stopRow) &lt;= 0) &amp;&amp;<a name="line.298"></a>
+<span class="sourceLineNo">299</span>            keys.getSecond()[i].length &gt; 0 ?<a name="line.299"></a>
+<span class="sourceLineNo">300</span>              keys.getSecond()[i] : stopRow;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  <a name="line.301"></a>
+<span class="sourceLineNo">302</span>          byte[] regionName = location.getRegionInfo().getRegionName();<a name="line.302"></a>
+<span class="sourceLineNo">303</span>          long regionSize = sizeCalculator.getRegionSize(regionName);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>          TableSplit split = new TableSplit(tableName, scan,<a name="line.304"></a>
+<span class="sourceLineNo">305</span>            splitStart, splitStop, regionLocation, regionSize);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>          splits.add(split);<a name="line.306"></a>
+<span class="sourceLineNo">307</span>          if (LOG.isDebugEnabled()) {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>            LOG.debug("getSplits: split -&gt; " + i + " -&gt; " + split);<a name="line.308"></a>
+<span class="sourceLineNo">309</span>          }<a name="line.309"></a>
+<span class="sourceLineNo">310</span>        }<a name="line.310"></a>
+<span class="sourceLineNo">311</span>      }<a name="line.311"></a>
+<span class="sourceLineNo">312</span>      //The default value of "hbase.mapreduce.input.autobalance" is false, which means not enabled.<a name="line.312"></a>
+<span class="sourceLineNo">313</span>      boolean enableAutoBalance = context.getConfiguration()<a name="line.313"></a>
+<span class="sourceLineNo">314</span>        .getBoolean(MAPREDUCE_INPUT_AUTOBALANCE, false);<a name="line.314"></a>
+<span class="sourceLineNo">315</span>      if (enableAutoBalance) {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>        long totalRegionSize=0;<a name="line.316"></a>
+<span class="sourceLineNo">317</span>        for (int i = 0; i &lt; splits.size(); i++){<a name="line.317"></a>
+<span class="sourceLineNo">318</span>          TableSplit ts = (TableSplit)splits.get(i);<a name="line.318"></a>
+<span class="sourceLineNo">319</span>          totalRegionSize += ts.getLength();<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        }<a name="line.320"></a>
+<span class="sourceLineNo">321</span>        long averageRegionSize = totalRegionSize / splits.size();<a name="line.321"></a>
+<span class="sourceLineNo">322</span>        // the averageRegionSize must be positive.<a name="line.322"></a>
+<span class="sourceLineNo">323</span>        if (averageRegionSize &lt;= 0) {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>            LOG.warn("The averageRegionSize is not positive: "+ averageRegionSize + ", " +<a name="line.324"></a>
+<span class="sourceLineNo">325</span>                    "set it to 1.");<a name="line.325"></a>
+<span class="sourceLineNo">326</span>            averageRegionSize = 1;<a name="line.326"></a>
+<span class="sourceLineNo">327</span>        }<a name="line.327"></a>
+<span class="sourceLineNo">328</span>        return calculateRebalancedSplits(splits, context, averageRegionSize);<a name="line.328"></a>
+<span class="sourceLineNo">329</span>      } else {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>        return splits;<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      }<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    } finally {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (closeOnFinish) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        closeTable();<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      }<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    }<a name="line.336"></a>
+<span class="sourceLineNo">337</span>  }<a name="line.337"></a>
+<span class="sourceLineNo">338</span><a name="line.338"></a>
+<span class="sourceLineNo">339</span>  String reverseDNS(InetAddress ipAddress) throws UnknownHostException {<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    String hostName = this.reverseDNSCacheMap.get(ipAddress);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>    if (hostName == null) {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>      String ipAddressString = null;<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      try {<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        ipAddressString = DNS.reverseDns(ipAddress, null);<a name="line.344"></a>
+<span class="sourceLineNo">345</span>      } catch (Exception e) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>        // We can use InetAddress in case the jndi failed to pull up the reverse DNS entry from the<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        // name service. Also, in case of ipv6, we need to use the InetAddress since resolving<a name="line.347"></a>
+<span class="sourceLineNo">348</span>        // reverse DNS using jndi doesn't work well with ipv6 addresses.<a name="line.348"></a>
+<span class="sourceLineNo">349</span>        ipAddressString = InetAddress.getByName(ipAddress.getHostAddress()).getHostName();<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      }<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      if (ipAddressString == null) throw new UnknownHostException("No host found for " + ipAddress);<a name="line.351"></a>
+<span class="sourceLineNo">352</span>      hostName = Strings.domainNamePointerToHostName(ipAddressString);<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      this.reverseDNSCacheMap.put(ipAddress, hostName);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    }<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    return hostName;<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  }<a name="line.356"></a>
+<span class="sourceLineNo">357</span><a name="line.357"></a>
+<span class="sourceLineNo">358</span>  /**<a name="line.358"></a>
+<span class="sourceLineNo">359</span>   * Calculates the number of MapReduce input splits for the map tasks. The number of<a name="line.359"></a>
+<span class="sourceLineNo">360</span>   * MapReduce input splits depends on the average region size and the "data skew ratio" user set in<a name="line.360"></a>
+<span class="sourceLineNo">361</span>   * configuration.<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   *<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * @param list  The list of input splits before balance.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * @param context  The current job context.<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * @param average  The average size of all regions .<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return The list of input splits.<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * @throws IOException When creating the list of splits fails.<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @see org.apache.hadoop.mapreduce.InputFormat#getSplits(<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *   org.apache.hadoop.mapreduce.JobContext)<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   */<a name="line.370"></a>
+<span class="sourceLineNo">371</span>  private List&lt;InputSplit&gt; calculateRebalancedSplits(List&lt;InputSplit&gt; list, JobContext context,<a name="line.371"></a>
+<span class="sourceLineNo">372</span>                                               long average) throws IOException {<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    List&lt;InputSplit&gt; resultList = new ArrayList&lt;InputSplit&gt;();<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    Configuration conf = context.getConfiguration();<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    //The default data skew ratio is 3<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    long dataSkewRatio = conf.getLong(INPUT_AUTOBALANCE_MAXSKEWRATIO, 3);<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    //It determines which mode to use: text key mode or binary key mode. The default is text mode.<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    boolean isTextKey = context.getConfiguration().getBoolean(TABLE_ROW_TEXTKEY, true);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    long dataSkewThreshold = dataSkewRatio * average;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    int count = 0;<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    while (count &lt; list.size()) {<a name="line.381"></a>
+<span class="sourceLineNo">382</span>      TableSplit ts = (TableSplit)list.get(count);<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      TableName tableName = ts.getTable();<a name="line.383"></a>
+<span class="sourceLineNo">384</span>      String regionLocation = ts.getRegionLocation();<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      long regionSize = ts.getLength();<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      if (regionSize &gt;= dataSkewThreshold) {<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        // if the current region size is large than the data skew threshold,<a name="line.387"></a>
+<span class="sourceLineNo">388</span>        // split the region into two MapReduce input splits.<a name="line.388"></a>
+<span class="sourceLineNo">389</span>        byte[] splitKey = getSplitKey(ts.getStartRow(), ts.getEndRow(), isTextKey);<a name="line.389"></a>
+<span class="sourceLineNo">390</span>         //Set the size of child TableSplit as 1/2 of the region size. The exact size of the<a name="line.390"></a>
+<span class="sourceLineNo">391</span>         // MapReduce input splits is not far off.<a name="line.391"></a>
+<span class="sourc

<TRUNCATED>
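
The TableInputFormatBase fragment above reads three configuration keys in getSplits(JobContext): "hbase.mapreduce.input.autobalance" (enable split rebalancing), "hbase.mapreduce.input.autobalance.maxskewratio" (default 3; a region whose size is at least ratio * average region size is cut into two input splits), and "hbase.table.row.textkey" (whether split keys are treated as text or binary). The driver below is a minimal sketch, not part of this commit, showing how those keys could be supplied when submitting a scan job; the table name and the identity mapper are hypothetical placeholders.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.mapreduce.Job;

public class AutoBalanceScanDriver {

  // Identity mapper, for illustration only.
  static class ExampleMapper extends TableMapper<ImmutableBytesWritable, Result> {
    @Override
    protected void map(ImmutableBytesWritable key, Result value, Context context)
        throws IOException, InterruptedException {
      context.write(key, value);
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Default is false: one input split per region, no rebalancing.
    conf.setBoolean("hbase.mapreduce.input.autobalance", true);
    // Regions at least 3x the average region size become two splits (default ratio is 3).
    conf.setLong("hbase.mapreduce.input.autobalance.maxskewratio", 3);
    // Row keys are printable ASCII; set to false for binary row keys (default true).
    conf.setBoolean("hbase.table.row.textkey", true);

    Job job = Job.getInstance(conf, "autobalanced-table-scan");
    Scan scan = new Scan();   // full-table scan; add families/filters as needed
    TableMapReduceUtil.initTableMapperJob(
        "exampleTable",                   // hypothetical table name
        scan,
        ExampleMapper.class,
        ImmutableBytesWritable.class,     // mapper output key class
        Result.class,                     // mapper output value class
        job);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

With the numbers above, if the average region size works out to 1 GB, any region of 3 GB or more is turned into two MapReduce input splits instead of one.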

[40/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
index db5e3cc..49d497c 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
@@ -108,7 +108,7 @@
 </dl>
 <hr>
 <br>
-<pre>protected static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.466">BufferedDataBlockEncoder.OffheapDecodedCell</a>
+<pre>protected static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.468">BufferedDataBlockEncoder.OffheapDecodedCell</a>
 extends <a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a>, <a href="../../../../../../org/apache/hadoop/hbase/SettableSequenceId.html" title="interface in org.apache.hadoop.hbase">SettableSequenceId</a>, <a href="../../../../../../org/apache/hadoop/hbase/Streamable.html" title="interface in org.apache.hadoop.hbase">Streamable</a></pre>
 </li>
@@ -415,7 +415,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>FIXED_OVERHEAD</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.468">FIXED_OVERHEAD</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.470">FIXED_OVERHEAD</a></pre>
 </li>
 </ul>
 <a name="keyBuffer">
@@ -424,7 +424,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>keyBuffer</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.471">keyBuffer</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.473">keyBuffer</a></pre>
 </li>
 </ul>
 <a name="rowLength">
@@ -433,7 +433,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>rowLength</h4>
-<pre>private&nbsp;short <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.472">rowLength</a></pre>
+<pre>private&nbsp;short <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.474">rowLength</a></pre>
 </li>
 </ul>
 <a name="familyOffset">
@@ -442,7 +442,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>familyOffset</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.473">familyOffset</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.475">familyOffset</a></pre>
 </li>
 </ul>
 <a name="familyLength">
@@ -451,7 +451,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>familyLength</h4>
-<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.474">familyLength</a></pre>
+<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.476">familyLength</a></pre>
 </li>
 </ul>
 <a name="qualifierOffset">
@@ -460,7 +460,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>qualifierOffset</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.475">qualifierOffset</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.477">qualifierOffset</a></pre>
 </li>
 </ul>
 <a name="qualifierLength">
@@ -469,7 +469,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>qualifierLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.476">qualifierLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.478">qualifierLength</a></pre>
 </li>
 </ul>
 <a name="timestamp">
@@ -478,7 +478,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>timestamp</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.477">timestamp</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.479">timestamp</a></pre>
 </li>
 </ul>
 <a name="typeByte">
@@ -487,7 +487,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>typeByte</h4>
-<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.478">typeByte</a></pre>
+<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.480">typeByte</a></pre>
 </li>
 </ul>
 <a name="valueBuffer">
@@ -496,7 +496,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>valueBuffer</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.479">valueBuffer</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.481">valueBuffer</a></pre>
 </li>
 </ul>
 <a name="valueOffset">
@@ -505,7 +505,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>valueOffset</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.480">valueOffset</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.482">valueOffset</a></pre>
 </li>
 </ul>
 <a name="valueLength">
@@ -514,7 +514,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>valueLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.481">valueLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.483">valueLength</a></pre>
 </li>
 </ul>
 <a name="tagsBuffer">
@@ -523,7 +523,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsBuffer</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.482">tagsBuffer</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.484">tagsBuffer</a></pre>
 </li>
 </ul>
 <a name="tagsOffset">
@@ -532,7 +532,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsOffset</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.483">tagsOffset</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.485">tagsOffset</a></pre>
 </li>
 </ul>
 <a name="tagsLength">
@@ -541,7 +541,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.484">tagsLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.486">tagsLength</a></pre>
 </li>
 </ul>
 <a name="seqId">
@@ -550,7 +550,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockListLast">
 <li class="blockList">
 <h4>seqId</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.485">seqId</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.487">seqId</a></pre>
 </li>
 </ul>
 </li>
@@ -567,7 +567,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BufferedDataBlockEncoder.OffheapDecodedCell</h4>
-<pre>protected&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.487">BufferedDataBlockEncoder.OffheapDecodedCell</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;keyBuffer,
+<pre>protected&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.489">BufferedDataBlockEncoder.OffheapDecodedCell</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;keyBuffer,
                                            short&nbsp;rowLength,
                                            int&nbsp;familyOffset,
                                            byte&nbsp;familyLength,
@@ -598,7 +598,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.512">getRowArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.514">getRowArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.</div>
@@ -614,7 +614,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.517">getRowOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.519">getRowOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowOffset()">getRowOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -627,7 +627,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowLength</h4>
-<pre>public&nbsp;short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.522">getRowLength</a>()</pre>
+<pre>public&nbsp;short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.524">getRowLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowLength()">getRowLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -640,7 +640,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.527">getFamilyArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.529">getFamilyArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous bytes composed of legal HDFS filename characters which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.</div>
@@ -656,7 +656,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.532">getFamilyOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.534">getFamilyOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyOffset()">getFamilyOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -669,7 +669,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyLength</h4>
-<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.537">getFamilyLength</a>()</pre>
+<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.539">getFamilyLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyLength()">getFamilyLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -682,7 +682,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.542">getQualifierArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.544">getQualifierArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.</div>
@@ -698,7 +698,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.547">getQualifierOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.549">getQualifierOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierOffset()">getQualifierOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -711,7 +711,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.552">getQualifierLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.554">getQualifierLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierLength()">getQualifierLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -724,7 +724,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTimestamp</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.557">getTimestamp</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.559">getTimestamp</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTimestamp()">getTimestamp</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -738,7 +738,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTypeByte</h4>
-<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.562">getTypeByte</a>()</pre>
+<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.564">getTypeByte</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTypeByte()">getTypeByte</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -751,7 +751,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getSequenceId</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.567">getSequenceId</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.569">getSequenceId</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getSequenceId()">Cell</a></code></strong></div>
 <div class="block">A region-specific unique monotonically increasing sequence ID given to each Cell. It always
  exists for cells in the memstore but is not retained forever. It will be kept for
@@ -769,7 +769,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.572">getValueArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.574">getValueArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
 Integer.MAX_VALUE which is 2,147,483,647 bytes.</div>
@@ -785,7 +785,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.577">getValueOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.579">getValueOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueOffset()">getValueOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -798,7 +798,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.582">getValueLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.584">getValueLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueLength()">getValueLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -811,7 +811,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.587">getTagsArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.589">getTagsArray</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsArray()">getTagsArray</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -824,7 +824,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.592">getTagsOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.594">getTagsOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsOffset()">getTagsOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -837,7 +837,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.597">getTagsLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.599">getTagsLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsLength()">getTagsLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -850,7 +850,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowByteBuffer</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.602">getRowByteBuffer</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.604">getRowByteBuffer</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getRowByteBuffer()">getRowByteBuffer</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -863,7 +863,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowPosition</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.607">getRowPosition</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.609">getRowPosition</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getRowPosition()">getRowPosition</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -876,7 +876,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyByteBuffer</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.612">getFamilyByteBuffer</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.614">getFamilyByteBuffer</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getFamilyByteBuffer()">getFamilyByteBuffer</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -889,7 +889,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyPosition</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.617">getFamilyPosition</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.619">getFamilyPosition</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getFamilyPosition()">getFamilyPosition</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -902,7 +902,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierByteBuffer</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.622">getQualifierByteBuffer</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.624">getQualifierByteBuffer</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getQualifierByteBuffer()">getQualifierByteBuffer</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -915,7 +915,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierPosition</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.627">getQualifierPosition</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.629">getQualifierPosition</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getQualifierPosition()">getQualifierPosition</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -928,7 +928,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueByteBuffer</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.632">getValueByteBuffer</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.634">getValueByteBuffer</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getValueByteBuffer()">getValueByteBuffer</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -941,7 +941,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getValuePosition</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.637">getValuePosition</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.639">getValuePosition</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getValuePosition()">getValuePosition</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -954,7 +954,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsByteBuffer</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.642">getTagsByteBuffer</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.644">getTagsByteBuffer</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getTagsByteBuffer()">getTagsByteBuffer</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -967,7 +967,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsPosition</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.647">getTagsPosition</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.649">getTagsPosition</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getTagsPosition()">getTagsPosition</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -980,7 +980,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.652">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.654">heapSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize()">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
@@ -994,7 +994,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>setSequenceId</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.657">setSequenceId</a>(long&nbsp;seqId)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.659">setSequenceId</a>(long&nbsp;seqId)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/SettableSequenceId.html#setSequenceId(long)">SettableSequenceId</a></code></strong></div>
 <div class="block">Sets with the given seqId.</div>
 <dl>
@@ -1009,7 +1009,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>write</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.662">write</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;out)
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.664">write</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;out)
           throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Streamable.html#write(java.io.OutputStream)">Streamable</a></code></strong></div>
 <div class="block">Write this cell to an OutputStream.</div>
@@ -1028,7 +1028,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html"
 <ul class="blockListLast">
 <li class="blockList">
 <h4>write</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.667">write</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;out,
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html#line.669">write</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;out,
         boolean&nbsp;withTags)
           throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Streamable.html#write(java.io.OutputStream,%20boolean)">Streamable</a></code></strong></div>

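The OffheapDecodedCell accessors diffed above come in pairs: the Cell methods return an (array, offset, length) triple, while the ByteBufferedCell overrides (getValueByteBuffer()/getValuePosition()/getValueLength() and friends) return a (buffer, position, length) triple over an off-heap ByteBuffer. A minimal sketch of reading such a region with plain java.nio, assuming a hypothetical (buffer, position, length) triple rather than the real decoded-cell internals:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class OffheapValueReadSketch {
  // Copy `length` bytes starting at `position` out of `buf` without
  // disturbing buf's own position/limit; duplicate() shares the bytes
  // but gives an independent cursor.
  static byte[] copyRegion(ByteBuffer buf, int position, int length) {
    ByteBuffer dup = buf.duplicate();
    dup.position(position);
    byte[] out = new byte[length];
    dup.get(out);
    return out;
  }

  public static void main(String[] args) {
    // Hypothetical off-heap value region: 6 bytes starting at position 3.
    ByteBuffer value = ByteBuffer.allocateDirect(16);
    value.position(3);
    value.put("value1".getBytes(StandardCharsets.UTF_8));
    byte[] copied = copyRegion(value, 3, 6);
    System.out.println(new String(copied, StandardCharsets.UTF_8)); // value1
  }
}

This only illustrates the position/length style of access; the actual classes keep their buffers private and serve reads through the accessors listed in the diff.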
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
index b25eeb7..267440d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
@@ -103,7 +103,7 @@
 </dl>
 <hr>
 <br>
-<pre>protected static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.290">BufferedDataBlockEncoder.OnheapDecodedCell</a>
+<pre>protected static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.292">BufferedDataBlockEncoder.OnheapDecodedCell</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>, <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a>, <a href="../../../../../../org/apache/hadoop/hbase/SettableSequenceId.html" title="interface in org.apache.hadoop.hbase">SettableSequenceId</a>, <a href="../../../../../../org/apache/hadoop/hbase/Streamable.html" title="interface in org.apache.hadoop.hbase">Streamable</a></pre>
 <div class="block">Copies only the key part of the keybuffer by doing a deep copy and passes the
@@ -378,7 +378,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>FIXED_OVERHEAD</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.292">FIXED_OVERHEAD</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.294">FIXED_OVERHEAD</a></pre>
 </li>
 </ul>
 <a name="keyOnlyBuffer">
@@ -387,7 +387,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>keyOnlyBuffer</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.295">keyOnlyBuffer</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.297">keyOnlyBuffer</a></pre>
 </li>
 </ul>
 <a name="rowLength">
@@ -396,7 +396,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>rowLength</h4>
-<pre>private&nbsp;short <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.296">rowLength</a></pre>
+<pre>private&nbsp;short <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.298">rowLength</a></pre>
 </li>
 </ul>
 <a name="familyOffset">
@@ -405,7 +405,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>familyOffset</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.297">familyOffset</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.299">familyOffset</a></pre>
 </li>
 </ul>
 <a name="familyLength">
@@ -414,7 +414,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>familyLength</h4>
-<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.298">familyLength</a></pre>
+<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.300">familyLength</a></pre>
 </li>
 </ul>
 <a name="qualifierOffset">
@@ -423,7 +423,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>qualifierOffset</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.299">qualifierOffset</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.301">qualifierOffset</a></pre>
 </li>
 </ul>
 <a name="qualifierLength">
@@ -432,7 +432,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>qualifierLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.300">qualifierLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.302">qualifierLength</a></pre>
 </li>
 </ul>
 <a name="timestamp">
@@ -441,7 +441,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>timestamp</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.301">timestamp</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.303">timestamp</a></pre>
 </li>
 </ul>
 <a name="typeByte">
@@ -450,7 +450,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>typeByte</h4>
-<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.302">typeByte</a></pre>
+<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.304">typeByte</a></pre>
 </li>
 </ul>
 <a name="valueBuffer">
@@ -459,7 +459,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>valueBuffer</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.303">valueBuffer</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.305">valueBuffer</a></pre>
 </li>
 </ul>
 <a name="valueOffset">
@@ -468,7 +468,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>valueOffset</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.304">valueOffset</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.306">valueOffset</a></pre>
 </li>
 </ul>
 <a name="valueLength">
@@ -477,7 +477,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>valueLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.305">valueLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.307">valueLength</a></pre>
 </li>
 </ul>
 <a name="tagsBuffer">
@@ -486,7 +486,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsBuffer</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.306">tagsBuffer</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.308">tagsBuffer</a></pre>
 </li>
 </ul>
 <a name="tagsOffset">
@@ -495,7 +495,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsOffset</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.307">tagsOffset</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.309">tagsOffset</a></pre>
 </li>
 </ul>
 <a name="tagsLength">
@@ -504,7 +504,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.308">tagsLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.310">tagsLength</a></pre>
 </li>
 </ul>
 <a name="seqId">
@@ -513,7 +513,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockListLast">
 <li class="blockList">
 <h4>seqId</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.309">seqId</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.311">seqId</a></pre>
 </li>
 </ul>
 </li>
@@ -530,7 +530,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BufferedDataBlockEncoder.OnheapDecodedCell</h4>
-<pre>protected&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.311">BufferedDataBlockEncoder.OnheapDecodedCell</a>(byte[]&nbsp;keyBuffer,
+<pre>protected&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.313">BufferedDataBlockEncoder.OnheapDecodedCell</a>(byte[]&nbsp;keyBuffer,
                                           short&nbsp;rowLength,
                                           int&nbsp;familyOffset,
                                           byte&nbsp;familyLength,
@@ -561,7 +561,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.333">getRowArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.335">getRowArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.</div>
@@ -577,7 +577,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.338">getFamilyArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.340">getFamilyArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous bytes composed of legal HDFS filename characters which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.</div>
@@ -593,7 +593,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.343">getQualifierArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.345">getQualifierArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.</div>
@@ -609,7 +609,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.348">getRowOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.350">getRowOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowOffset()">getRowOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -622,7 +622,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowLength</h4>
-<pre>public&nbsp;short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.353">getRowLength</a>()</pre>
+<pre>public&nbsp;short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.355">getRowLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowLength()">getRowLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -635,7 +635,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.358">getFamilyOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.360">getFamilyOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyOffset()">getFamilyOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -648,7 +648,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyLength</h4>
-<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.363">getFamilyLength</a>()</pre>
+<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.365">getFamilyLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyLength()">getFamilyLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -661,7 +661,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.368">getQualifierOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.370">getQualifierOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierOffset()">getQualifierOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -674,7 +674,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.373">getQualifierLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.375">getQualifierLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierLength()">getQualifierLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -687,7 +687,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTimestamp</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.378">getTimestamp</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.380">getTimestamp</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTimestamp()">getTimestamp</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -701,7 +701,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTypeByte</h4>
-<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.383">getTypeByte</a>()</pre>
+<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.385">getTypeByte</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTypeByte()">getTypeByte</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -714,7 +714,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getSequenceId</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.388">getSequenceId</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.390">getSequenceId</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getSequenceId()">Cell</a></code></strong></div>
 <div class="block">A region-specific unique monotonically increasing sequence ID given to each Cell. It always
  exists for cells in the memstore but is not retained forever. It will be kept for
@@ -732,7 +732,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.393">getValueArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.395">getValueArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
 Integer.MAX_VALUE which is 2,147,483,647 bytes.</div>
@@ -748,7 +748,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.398">getValueOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.400">getValueOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueOffset()">getValueOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -761,7 +761,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.403">getValueLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.405">getValueLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueLength()">getValueLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -774,7 +774,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.408">getTagsArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.410">getTagsArray</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsArray()">getTagsArray</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -787,7 +787,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.413">getTagsOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.415">getTagsOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsOffset()">getTagsOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -800,7 +800,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.418">getTagsLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.420">getTagsLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsLength()">getTagsLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -813,7 +813,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.423">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.425">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -826,7 +826,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>setSequenceId</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.429">setSequenceId</a>(long&nbsp;seqId)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.431">setSequenceId</a>(long&nbsp;seqId)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/SettableSequenceId.html#setSequenceId(long)">SettableSequenceId</a></code></strong></div>
 <div class="block">Sets with the given seqId.</div>
 <dl>
@@ -841,7 +841,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.434">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.436">heapSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize()">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
@@ -855,7 +855,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>write</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.439">write</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;out)
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.441">write</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;out)
           throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Streamable.html#write(java.io.OutputStream)">Streamable</a></code></strong></div>
 <div class="block">Write this cell to an OutputStream.</div>
@@ -874,7 +874,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockListLast">
 <li class="blockList">
 <h4>write</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.444">write</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;out,
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html#line.446">write</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;out,
         boolean&nbsp;withTags)
           throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Streamable.html#write(java.io.OutputStream,%20boolean)">Streamable</a></code></strong></div>


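The two write(OutputStream) / write(OutputStream, boolean withTags) methods diffed above follow the Streamable contract: serialize the cell to the stream and return the number of bytes written, optionally including tags. A minimal sketch of that contract, assuming a simple length-prefixed layout (not the actual KeyValue wire format) and hypothetical key/value/tags arrays:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class StreamableWriteSketch {
  // Write key and value as int-length-prefixed arrays, optionally followed
  // by a short-length-prefixed tags array, and return total bytes written.
  static int write(OutputStream out, byte[] key, byte[] value, byte[] tags,
      boolean withTags) throws IOException {
    DataOutputStream dos = new DataOutputStream(out);
    dos.writeInt(key.length);
    dos.write(key);
    dos.writeInt(value.length);
    dos.write(value);
    int written = 8 + key.length + value.length;
    if (withTags) {
      dos.writeShort(tags.length);
      dos.write(tags);
      written += 2 + tags.length;
    }
    return written;
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    int n = write(bos, new byte[] {1, 2}, new byte[] {3}, new byte[0], true);
    System.out.println(n + " bytes written, stream holds " + bos.size()); // 13, 13
  }
}

The returned count matters to callers that pre-size buffers; the real implementations compute it from the cell's key, value and tags lengths rather than from a length-prefixed layout like the one sketched here.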
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
index ec27cfe..6d8219b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
@@ -90,606 +90,612 @@
 <span class="sourceLineNo">082</span>   */<a name="line.82"></a>
 <span class="sourceLineNo">083</span><a name="line.83"></a>
 <span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   * If the chosen ioengine can persist its state across restarts, the path to the file to<a name="line.85"></a>
-<span class="sourceLineNo">086</span>   * persist to.<a name="line.86"></a>
-<span class="sourceLineNo">087</span>   */<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  public static final String BUCKET_CACHE_PERSISTENT_PATH_KEY = <a name="line.88"></a>
-<span class="sourceLineNo">089</span>      "hbase.bucketcache.persistent.path";<a name="line.89"></a>
-<span class="sourceLineNo">090</span><a name="line.90"></a>
-<span class="sourceLineNo">091</span>  /**<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * If the bucket cache is used in league with the lru on-heap block cache (meta blocks such<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * as indices and blooms are kept in the lru blockcache and the data blocks in the<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   * bucket cache).<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  public static final String BUCKET_CACHE_COMBINED_KEY = <a name="line.96"></a>
-<span class="sourceLineNo">097</span>      "hbase.bucketcache.combinedcache.enabled";<a name="line.97"></a>
-<span class="sourceLineNo">098</span><a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final String BUCKET_CACHE_WRITER_THREADS_KEY = "hbase.bucketcache.writer.threads";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  public static final String BUCKET_CACHE_WRITER_QUEUE_KEY = <a name="line.100"></a>
-<span class="sourceLineNo">101</span>      "hbase.bucketcache.writer.queuelength";<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  /**<a name="line.103"></a>
-<span class="sourceLineNo">104</span>   * A comma-delimited array of values for use as bucket sizes.<a name="line.104"></a>
-<span class="sourceLineNo">105</span>   */<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  public static final String BUCKET_CACHE_BUCKETS_KEY = "hbase.bucketcache.bucket.sizes";<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /**<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   * Defaults for Bucket cache<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   */<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  public static final boolean DEFAULT_BUCKET_CACHE_COMBINED = true;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  public static final int DEFAULT_BUCKET_CACHE_WRITER_THREADS = 3;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  public static final int DEFAULT_BUCKET_CACHE_WRITER_QUEUE = 64;<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span> /**<a name="line.115"></a>
-<span class="sourceLineNo">116</span>   * Configuration key to prefetch all blocks of a given file into the block cache<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * when the file is opened.<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   */<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  public static final String PREFETCH_BLOCKS_ON_OPEN_KEY =<a name="line.119"></a>
-<span class="sourceLineNo">120</span>      "hbase.rs.prefetchblocksonopen";<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  /**<a name="line.122"></a>
-<span class="sourceLineNo">123</span>   * The target block size used by blockcache instances. Defaults to<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   * {@link HConstants#DEFAULT_BLOCKSIZE}.<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   * TODO: this config point is completely wrong, as it's used to determine the<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * target block size of BlockCache instances. Rename.<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  public static final String BLOCKCACHE_BLOCKSIZE_KEY = "hbase.offheapcache.minblocksize";<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  private static final String EXTERNAL_BLOCKCACHE_KEY = "hbase.blockcache.use.external";<a name="line.130"></a>
-<span class="sourceLineNo">131</span>  private static final boolean EXTERNAL_BLOCKCACHE_DEFAULT = false;<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>  private static final String EXTERNAL_BLOCKCACHE_CLASS_KEY="hbase.blockcache.external.class";<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  private static final String DROP_BEHIND_CACHE_COMPACTION_KEY="hbase.hfile.drop.behind.compaction";<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  private static final boolean DROP_BEHIND_CACHE_COMPACTION_DEFAULT = true;<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  /**<a name="line.137"></a>
-<span class="sourceLineNo">138</span>   * Enum of all built in external block caches.<a name="line.138"></a>
-<span class="sourceLineNo">139</span>   * This is used for config.<a name="line.139"></a>
-<span class="sourceLineNo">140</span>   */<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  private static enum ExternalBlockCaches {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    memcached("org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    // TODO(eclark): Consider more. Redis, etc.<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    Class&lt;? extends BlockCache&gt; clazz;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    ExternalBlockCaches(String clazzName) {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      try {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>        clazz = (Class&lt;? extends BlockCache&gt;) Class.forName(clazzName);<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      } catch (ClassNotFoundException cnef) {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        clazz = null;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    }<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    ExternalBlockCaches(Class&lt;? extends BlockCache&gt; clazz) {<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      this.clazz = clazz;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>  // Defaults<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  public static final boolean DEFAULT_CACHE_DATA_ON_READ = true;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  public static final boolean DEFAULT_CACHE_DATA_ON_WRITE = false;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static final boolean DEFAULT_IN_MEMORY = false;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>  public static final boolean DEFAULT_CACHE_INDEXES_ON_WRITE = false;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  public static final boolean DEFAULT_CACHE_BLOOMS_ON_WRITE = false;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>  public static final boolean DEFAULT_EVICT_ON_CLOSE = false;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  public static final boolean DEFAULT_CACHE_DATA_COMPRESSED = false;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>  public static final boolean DEFAULT_PREFETCH_ON_OPEN = false;<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  /** Local reference to the block cache, null if completely disabled */<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private final BlockCache blockCache;<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>  /**<a name="line.170"></a>
-<span class="sourceLineNo">171</span>   * Whether blocks should be cached on read (default is on if there is a<a name="line.171"></a>
-<span class="sourceLineNo">172</span>   * cache but this can be turned off on a per-family or per-request basis).<a name="line.172"></a>
-<span class="sourceLineNo">173</span>   * If off we will STILL cache meta blocks; i.e. INDEX and BLOOM types.<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * This cannot be disabled.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   */<a name="line.175"></a>
-<span class="sourceLineNo">176</span>  private boolean cacheDataOnRead;<a name="line.176"></a>
-<span class="sourceLineNo">177</span><a name="line.177"></a>
-<span class="sourceLineNo">178</span>  /** Whether blocks should be flagged as in-memory when being cached */<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  private final boolean inMemory;<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>  /** Whether data blocks should be cached when new files are written */<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  private boolean cacheDataOnWrite;<a name="line.182"></a>
+<span class="sourceLineNo">085</span>   * If the chosen ioengine can persist its state across restarts, the path to the file to persist<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   * to. This file is NOT the data file. It is a file into which we will serialize the map of<a name="line.86"></a>
+<span class="sourceLineNo">087</span>   * what is in the data file. For example, if you pass the following argument as<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   * BUCKET_CACHE_IOENGINE_KEY ("hbase.bucketcache.ioengine"),<a name="line.88"></a>
+<span class="sourceLineNo">089</span>   * &lt;code&gt;file:/tmp/bucketcache.data &lt;/code&gt;, then we will write the bucketcache data to the file<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   * &lt;code&gt;/tmp/bucketcache.data&lt;/code&gt; but the metadata on where the data is in the supplied file<a name="line.90"></a>
+<span class="sourceLineNo">091</span>   * is an in-memory map that needs to be persisted across restarts. Where to store this<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   * in-memory state is what you supply here: e.g. &lt;code&gt;/tmp/bucketcache.map&lt;/code&gt;.<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   */<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  public static final String BUCKET_CACHE_PERSISTENT_PATH_KEY = <a name="line.94"></a>
+<span class="sourceLineNo">095</span>      "hbase.bucketcache.persistent.path";<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>  /**<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * If the bucket cache is used in league with the lru on-heap block cache (meta blocks such<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   * as indices and blooms are kept in the lru blockcache and the data blocks in the<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   * bucket cache).<a name="line.100"></a>
+<span class="sourceLineNo">101</span>   */<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  public static final String BUCKET_CACHE_COMBINED_KEY = <a name="line.102"></a>
+<span class="sourceLineNo">103</span>      "hbase.bucketcache.combinedcache.enabled";<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>  public static final String BUCKET_CACHE_WRITER_THREADS_KEY = "hbase.bucketcache.writer.threads";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  public static final String BUCKET_CACHE_WRITER_QUEUE_KEY = <a name="line.106"></a>
+<span class="sourceLineNo">107</span>      "hbase.bucketcache.writer.queuelength";<a name="line.107"></a>
+<span class="sourceLineNo">108</span><a name="line.108"></a>
+<span class="sourceLineNo">109</span>  /**<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * A comma-delimited array of values for use as bucket sizes.<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   */<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final String BUCKET_CACHE_BUCKETS_KEY = "hbase.bucketcache.bucket.sizes";<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  /**<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   * Defaults for Bucket cache<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   */<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  public static final boolean DEFAULT_BUCKET_CACHE_COMBINED = true;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  public static final int DEFAULT_BUCKET_CACHE_WRITER_THREADS = 3;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>  public static final int DEFAULT_BUCKET_CACHE_WRITER_QUEUE = 64;<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span> /**<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * Configuration key to prefetch all blocks of a given file into the block cache<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * when the file is opened.<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   */<a name="line.124"></a>
+<span class="sourceLineNo">125</span>  public static final String PREFETCH_BLOCKS_ON_OPEN_KEY =<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      "hbase.rs.prefetchblocksonopen";<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  /**<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   * The target block size used by blockcache instances. Defaults to<a name="line.129"></a>
+<span class="sourceLineNo">130</span>   * {@link HConstants#DEFAULT_BLOCKSIZE}.<a name="line.130"></a>
+<span class="sourceLineNo">131</span>   * TODO: this config point is completely wrong, as it's used to determine the<a name="line.131"></a>
+<span class="sourceLineNo">132</span>   * target block size of BlockCache instances. Rename.<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   */<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public static final String BLOCKCACHE_BLOCKSIZE_KEY = "hbase.offheapcache.minblocksize";<a name="line.134"></a>
+<span class="sourceLineNo">135</span><a name="line.135"></a>
+<span class="sourceLineNo">136</span>  private static final String EXTERNAL_BLOCKCACHE_KEY = "hbase.blockcache.use.external";<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  private static final boolean EXTERNAL_BLOCKCACHE_DEFAULT = false;<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  private static final String EXTERNAL_BLOCKCACHE_CLASS_KEY="hbase.blockcache.external.class";<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  private static final String DROP_BEHIND_CACHE_COMPACTION_KEY="hbase.hfile.drop.behind.compaction";<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  private static final boolean DROP_BEHIND_CACHE_COMPACTION_DEFAULT = true;<a name="line.141"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>  /**<a name="line.143"></a>
+<span class="sourceLineNo">144</span>   * Enum of all built in external block caches.<a name="line.144"></a>
+<span class="sourceLineNo">145</span>   * This is used for config.<a name="line.145"></a>
+<span class="sourceLineNo">146</span>   */<a name="line.146"></a>
+<span class="sourceLineNo">147</span>  private static enum ExternalBlockCaches {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    memcached("org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    // TODO(eclark): Consider more. Redis, etc.<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    Class&lt;? extends BlockCache&gt; clazz;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    ExternalBlockCaches(String clazzName) {<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      try {<a name="line.152"></a>
+<span class="sourceLineNo">153</span>        clazz = (Class&lt;? extends BlockCache&gt;) Class.forName(clazzName);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      } catch (ClassNotFoundException cnef) {<a name="line.154"></a>
+<span class="sourceLineNo">155</span>        clazz = null;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      }<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    }<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    ExternalBlockCaches(Class&lt;? extends BlockCache&gt; clazz) {<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      this.clazz = clazz;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>    }<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  }<a name="line.161"></a>
+<span class="sourceLineNo">162</span><a name="line.162"></a>
+<span class="sourceLineNo">163</span>  // Defaults<a name="line.163"></a>
+<span class="sourceLineNo">164</span>  public static final boolean DEFAULT_CACHE_DATA_ON_READ = true;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>  public static final boolean DEFAULT_CACHE_DATA_ON_WRITE = false;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>  public static final boolean DEFAULT_IN_MEMORY = false;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>  public static final boolean DEFAULT_CACHE_INDEXES_ON_WRITE = false;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  public static final boolean DEFAULT_CACHE_BLOOMS_ON_WRITE = false;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  public static final boolean DEFAULT_EVICT_ON_CLOSE = false;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  public static final boolean DEFAULT_CACHE_DATA_COMPRESSED = false;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>  public static final boolean DEFAULT_PREFETCH_ON_OPEN = false;<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>  /** Local reference to the block cache, null if completely disabled */<a name="line.173"></a>
+<span class="sourceLineNo">174</span>  private final BlockCache blockCache;<a name="line.174"></a>
+<span class="sourceLineNo">175</span><a name="line.175"></a>
+<span class="sourceLineNo">176</span>  /**<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * Whether blocks should be cached on read (default is on if there is a<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * cache but this can be turned off on a per-family or per-request basis).<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * If off we will STILL cache meta blocks; i.e. INDEX and BLOOM types.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * This cannot be disabled.<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  private boolean cacheDataOnRead;<a name="line.182"></a>
 <span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  /** Whether index blocks should be cached when new files are written */<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private final boolean cacheIndexesOnWrite;<a name="line.185"></a>
+<span class="sourceLineNo">184</span>  /** Whether blocks should be flagged as in-memory when being cached */<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  private final boolean inMemory;<a name="line.185"></a>
 <span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /** Whether compound bloom filter blocks should be cached on write */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  private final boolean cacheBloomsOnWrite;<a name="line.188"></a>
+<span class="sourceLineNo">187</span>  /** Whether data blocks should be cached when new files are written */<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  private boolean cacheDataOnWrite;<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>  /** Whether blocks of a file should be evicted when the file is closed */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  private boolean evictOnClose;<a name="line.191"></a>
+<span class="sourceLineNo">190</span>  /** Whether index blocks should be cached when new files are written */<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  private final boolean cacheIndexesOnWrite;<a name="line.191"></a>
 <span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>  /** Whether data blocks should be stored in compressed and/or encrypted form in the cache */<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  private final boolean cacheDataCompressed;<a name="line.194"></a>
+<span class="sourceLineNo">193</span>  /** Whether compound bloom filter blocks should be cached on write */<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  private final boolean cacheBloomsOnWrite;<a name="line.194"></a>
 <span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  /** Whether data blocks should be prefetched into the cache */<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private final boolean prefetchOnOpen;<a name="line.197"></a>
+<span class="sourceLineNo">196</span>  /** Whether blocks of a file should be evicted when the file is closed */<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  private boolean evictOnClose;<a name="line.197"></a>
 <span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * If true and if more than one tier in this cache deploy -- e.g. CombinedBlockCache has an L1<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * and an L2 tier -- then cache data blocks up in the L1 tier (The meta blocks are likely being<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * cached up in L1 already.  At least this is the case if CombinedBlockCache).<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   */<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private boolean cacheDataInL1;<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  private final boolean dropBehindCompaction;<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  /**<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   * Create a cache configuration using the specified configuration object and<a name="line.209"></a>
-<span class="sourceLineNo">210</span>   * family descriptor.<a name="line.210"></a>
-<span class="sourceLineNo">211</span>   * @param conf hbase configuration<a name="line.211"></a>
-<span class="sourceLineNo">212</span>   * @param family column family configuration<a name="line.212"></a>
-<span class="sourceLineNo">213</span>   */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  public CacheConfig(Configuration conf, HColumnDescriptor family) {<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    this(CacheConfig.instantiateBlockCache(conf),<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        family.isBlockCacheEnabled(),<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        family.isInMemory(),<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // For the following flags we enable them regardless of per-schema settings<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        // if they are enabled in the global configuration.<a name="line.219"></a>
-<span class="sourceLineNo">220</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY,<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            DEFAULT_CACHE_DATA_ON_WRITE) || family.isCacheDataOnWrite(),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY,<a name="line.222"></a>
-<span class="sourceLineNo">223</span>            DEFAULT_CACHE_INDEXES_ON_WRITE) || family.isCacheIndexesOnWrite(),<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY,<a name="line.224"></a>
-<span class="sourceLineNo">225</span>            DEFAULT_CACHE_BLOOMS_ON_WRITE) || family.isCacheBloomsOnWrite(),<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY,<a name="line.226"></a>
-<span class="sourceLineNo">227</span>            DEFAULT_EVICT_ON_CLOSE) || family.isEvictBlocksOnClose(),<a name="line.227"></a>
-<span class="sourceLineNo">228</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY,<a name="line.229"></a>
-<span class="sourceLineNo">230</span>            DEFAULT_PREFETCH_ON_OPEN) || family.isPrefetchBlocksOnOpen(),<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        conf.getBoolean(HColumnDescriptor.CACHE_DATA_IN_L1,<a name="line.231"></a>
-<span class="sourceLineNo">232</span>            HColumnDescriptor.DEFAULT_CACHE_DATA_IN_L1) || family.isCacheDataInL1(),<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY,DROP_BEHIND_CACHE_COMPACTION_DEFAULT)<a name="line.233"></a>
-<span class="sourceLineNo">234</span>     );<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  }<a name="line.235"></a>
-<span class="sourceLineNo">236</span><a name="line.236"></a>
-<span class="sourceLineNo">237</span>  /**<a name="line.237"></a>
-<span class="sourceLineNo">238</span>   * Create a cache configuration using the specified configuration object and<a name="line.238"></a>
-<span class="sourceLineNo">239</span>   * defaults for family level settings.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>   * @param conf hbase configuration<a name="line.240"></a>
-<span class="sourceLineNo">241</span>   */<a name="line.241"></a>
-<span class="sourceLineNo">242</span>  public CacheConfig(Configuration conf) {<a name="line.242"></a>
-<span class="sourceLineNo">243</span>    this(CacheConfig.instantiateBlockCache(conf),<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        DEFAULT_CACHE_DATA_ON_READ,<a name="line.244"></a>
-<span class="sourceLineNo">245</span>        DEFAULT_IN_MEMORY, // This is a family-level setting so can't be set<a name="line.245"></a>
-<span class="sourceLineNo">246</span>                           // strictly from conf<a name="line.246"></a>
-<span class="sourceLineNo">247</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE),<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE),<a name="line.248"></a>
-<span class="sourceLineNo">249</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE),<a name="line.249"></a>
-<span class="sourceLineNo">250</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.251"></a>
-<span class="sourceLineNo">252</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN),<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        conf.getBoolean(HColumnDescriptor.CACHE_DATA_IN_L1,<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          HColumnDescriptor.DEFAULT_CACHE_DATA_IN_L1),<a name="line.254"></a>
-<span class="sourceLineNo">255</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY,DROP_BEHIND_CACHE_COMPACTION_DEFAULT)<a name="line.255"></a>
-<span class="sourceLineNo">256</span>     );<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  }<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>  /**<a name="line.259"></a>
-<span class="sourceLineNo">260</span>   * Create a block cache configuration with the specified cache and<a name="line.260"></a>
-<span class="sourceLineNo">261</span>   * configuration parameters.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>   * @param blockCache reference to block cache, null if completely disabled<a name="line.262"></a>
-<span class="sourceLineNo">263</span>   * @param cacheDataOnRead whether DATA blocks should be cached on read (we always cache INDEX<a name="line.263"></a>
-<span class="sourceLineNo">264</span>   * blocks and BLOOM blocks; this cannot be disabled).<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * @param inMemory whether blocks should be flagged as in-memory<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * @param cacheDataOnWrite whether data blocks should be cached on write<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * @param cacheIndexesOnWrite whether index blocks should be cached on write<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * @param cacheBloomsOnWrite whether blooms should be cached on write<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * @param evictOnClose whether blocks should be evicted when HFile is closed<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   * @param cacheDataCompressed whether to store blocks as compressed in the cache<a name="line.270"></a>
-<span class="sourceLineNo">271</span>   * @param prefetchOnOpen whether to prefetch blocks upon open<a name="line.271"></a>
-<span class="sourceLineNo">272</span>   * @param cacheDataInL1 If more than one cache tier deployed, if true, cache this column families<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * data blocks up in the L1 tier.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  CacheConfig(final BlockCache blockCache,<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      final boolean cacheDataOnRead, final boolean inMemory,<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      final boolean cacheBloomsOnWrite, final boolean evictOnClose,<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      final boolean cacheDataCompressed, final boolean prefetchOnOpen,<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      final boolean cacheDataInL1, final boolean dropBehindCompaction) {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    this.blockCache = blockCache;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    this.cacheDataOnRead = cacheDataOnRead;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.inMemory = inMemory;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.cacheIndexesOnWrite = cacheIndexesOnWrite;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    this.cacheBloomsOnWrite = cacheBloomsOnWrite;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    this.evictOnClose = evictOnClose;<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    this.cacheDataCompressed = cacheDataCompressed;<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    this.prefetchOnOpen = prefetchOnOpen;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    this.cacheDataInL1 = cacheDataInL1;<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    this.dropBehindCompaction = dropBehindCompaction;<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    LOG.info(this);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
-<span class="sourceLineNo">295</span>  /**<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * Constructs a cache configuration copied from the specified configuration.<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   * @param cacheConf<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   */<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  public CacheConfig(CacheConfig cacheConf) {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    this(cacheConf.blockCache, cacheConf.cacheDataOnRead, cacheConf.inMemory,<a name="line.300"></a>
-<span class="sourceLineNo">301</span>        cacheConf.cacheDataOnWrite, cacheConf.cacheIndexesOnWrite,<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        cacheConf.cacheBloomsOnWrite, cacheConf.evictOnClose,<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        cacheConf.cacheDataCompressed, cacheConf.prefetchOnOpen,<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        cacheConf.cacheDataInL1, cacheConf.dropBehindCompaction);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  }<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /**<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * Checks whether the block cache is enabled.<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   */<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  public boolean isBlockCacheEnabled() {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    return this.blockCache != null;<a name="line.311"></a>
-<span class="sourceLineNo">312</span>  }<a name="line.312"></a>
-<span class="sourceLineNo">313</span><a name="line.313"></a>
-<span class="sourceLineNo">314</span>  /**<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * Returns the block cache.<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * @return the block cache, or null if caching is completely disabled<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  public BlockCache getBlockCache() {<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    return this.blockCache;<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  }<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * Returns whether the DATA blocks of this HFile should be cached on read or not (we always<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * cache the meta blocks, the INDEX and BLOOM blocks).<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * @return true if blocks should be cached on read, false if not<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   */<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  public boolean shouldCacheDataOnRead() {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    return isBlockCacheEnabled() &amp;&amp; cacheDataOnRead;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  }<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  public boolean shouldDropBehindCompaction() {<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    return dropBehindCompaction;<a name="line.332"></a>
-<span class="sourceLineNo">333</span>  }<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>  /**<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * Should we cache a block of a particular category? We always cache<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * important blocks such as index blocks, as long as the block cache is<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * available.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  public boolean shouldCacheBlockOnRead(BlockCategory category) {<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    return isBlockCacheEnabled()<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        &amp;&amp; (cacheDataOnRead ||<a name="line.342"></a>
-<span class="sourceLineNo">343</span>            category == BlockCategory.INDEX ||<a name="line.343"></a>
-<span class="sourceLineNo">344</span>            category == BlockCategory.BLOOM ||<a name="line.344"></a>
-<span class="sourceLineNo">345</span>            (prefetchOnOpen &amp;&amp;<a name="line.345"></a>
-<span class="sourceLineNo">346</span>                (category != BlockCategory.META &amp;&amp;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>                 category != BlockCategory.UNKNOWN)));<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  }<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * @return true if blocks in this file should be flagged as in-memory<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   */<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  public boolean isInMemory() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    return isBlockCacheEnabled() &amp;&amp; this.inMemory;<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * @return True if cache data blocks in L1 tier (if more than one tier in block cache deploy).<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   */<a name="line.359"></a>
-<span class="sourceLineNo">360</span>  public boolean isCacheDataInL1() {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataInL1;<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @return true if data blocks should be written to the cache when an HFile is<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   *         written, false if not<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public boolean shouldCacheDataOnWrite() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnWrite;<a name="line.369"></a>
-<span class="sourceLineNo">370</span>  }<a name="line.370"></a>
-<span class="sourceLineNo">371</span><a name="line.371"></a>
-<span class="sourceLineNo">372</span>  /**<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * Only used for testing.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   * @param cacheDataOnWrite whether data blocks should be written to the cache<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   *                         when an HFile is written<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   */<a name="line.376"></a>
-<span class="sourceLineNo">377</span>  @VisibleForTesting<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public void setCacheDataOnWrite(boolean cacheDataOnWrite) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Only used for testing.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param cacheDataInL1 Whether to cache data blocks up in l1 (if a multi-tier cache<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * implementation).<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  @VisibleForTesting<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  public void setCacheDataInL1(boolean cacheDataInL1) {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    this.cacheDataInL1 = cacheDataInL1;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>  }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>  /**<a name="line.392"></a>
-<span class="sourceLineNo">393</span>   * @return true if index blocks should be written to the cache when an HFile<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   *         is written, false if not<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   */<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  public boolean shouldCacheIndexesOnWrite() {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheIndexesOnWrite;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>  }<a name="line.398"></a>
-<span class="sourceLineNo">399</span><a name="line.399"></a>
-<span class="sourceLineNo">400</span>  /**<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @return true if bloom blocks should be written to the cache when an HFile<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   *         is written, false if not<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  public boolean shouldCacheBloomsOnWrite() {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheBloomsOnWrite;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * @return true if blocks should be evicted from the cache when an HFile<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *         reader is closed, false if not<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  public boolean shouldEvictOnClose() {<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    return isBlockCacheEnabled() &amp;&amp; this.evictOnClose;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  }<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>  /**<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * Only used for testing.<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @param evictOnClose whether blocks should be evicted from the cache when an<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   *                     HFile reader is closed<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  public void setEvictOnClose(boolean evictOnClose) {<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    this.evictOnClose = evictOnClose;<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  }<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>  /**<a name="line.425"></a>
-<span class="sourceLineNo">426</span>   * @return true if data blocks should be compressed in the cache, false if not<a name="line.426"></a>
-<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  public boolean shouldCacheDataCompressed() {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataCompressed;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  }<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  /**<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * @return true if this {@link BlockCategory} should be compressed in blockcache, false otherwise<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  public boolean shouldCacheCompressed(BlockCategory category) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    if (!isBlockCacheEnabled()) return false;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>    switch (category) {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      case DATA:<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        return this.cacheDataCompressed;<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      default:<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        return false;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    }<a name="line.442"></a>
-<span class="sourceLineNo">443</span>  }<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>  /**<a name="line.445"></a>
-<span class="sourceLineNo">446</span>   * @return true if blocks should be prefetched into the cache on open, false if not<a name="line.446"></a>
-<span class="sourceLineNo">447</span>   */<a name="line.447"></a>
-<span class="sourceLineNo">448</span>  public boolean shouldPrefetchOnOpen() {<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    return isBlockCacheEnabled() &amp;&amp; this.prefetchOnOpen;<a name="line.449"></a>
-<span class="sourceLineNo">450</span>  }<a name="line.450"></a>
-<span class="sourceLineNo">451</span><a name="line.451"></a>
-<span class="sourceLineNo">452</span>  /**<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   * Return true if we may find this type of block in block cache.<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * &lt;p&gt;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   * TODO: today {@code family.isBlockCacheEnabled()} only means {@code cacheDataOnRead}, so here we<a name="line.455"></a>
-<span class="sourceLineNo">456</span>   * consider lots of other configurations such as {@code cacheDataOnWrite}. We should fix this in<a name="line.456"></a>
-<span class="sourceLineNo">457</span>   * the future, {@code cacheDataOnWrite} should honor the CF level {@code isBlockCacheEnabled}<a name="line.457"></a>
-<span class="sourceLineNo">458</span>   * configuration.<a name="line.458"></a>
-<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
-<span class="sourceLineNo">460</span>  public boolean shouldReadBlockFromCache(BlockType blockType) {<a name="line.460"></a>
-<span class="sourceLineNo">461</span>    if (!isBlockCacheEnabled()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>      return false;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    if (cacheDataOnRead) {<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      return true;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    }<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    if (prefetchOnOpen) {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>      return true;<a name="line.468"></a>
+<span class="sourceLineNo">199</span>  /** Whether data blocks should be stored in compressed and/or encrypted form in the cache */<a name="line.199"></a>
+<span class="sourceLineNo">200</span>  private final boolean cacheDataCompressed;<a name="line.200"></a>
+<span class="sourceLineNo">201</span><a name="line.201"></a>
+<span class="sourceLineNo">202</span>  /** Whether data blocks should be prefetched into the cache */<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  private final boolean prefetchOnOpen;<a name="line.203"></a>
+<span class="sourceLineNo">204</span><a name="line.204"></a>
+<span class="sourceLineNo">205</span>  /**<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * If true and if more than one tier in this cache deploy -- e.g. CombinedBlockCache has an L1<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * and an L2 tier -- then cache data blocks up in the L1 tier (The meta blocks are likely being<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   * cached up in L1 already.  At least this is the case if CombinedBlockCache).<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
+<span class="sourceLineNo">210</span>  private boolean cacheDataInL1;<a name="line.210"></a>
+<span class="sourceLineNo">211</span><a name="line.211"></a>
+<span class="sourceLineNo">212</span>  private final boolean dropBehindCompaction;<a name="line.212"></a>
+<span class="sourceLineNo">213</span><a name="line.213"></a>
+<span class="sourceLineNo">214</span>  /**<a name="line.214"></a>
+<span class="sourceLineNo">215</span>   * Create a cache configuration using the specified configuration object and<a name="line.215"></a>
+<span class="sourceLineNo">216</span>   * family descriptor.<a name="line.216"></a>
+<span class="sourceLineNo">217</span>   * @param conf hbase configuration<a name="line.217"></a>
+<span class="sourceLineNo">218</span>   * @param family column family configuration<a name="line.218"></a>
+<span class="sourceLineNo">219</span>   */<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  public CacheConfig(Configuration conf, HColumnDescriptor family) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    this(CacheConfig.instantiateBlockCache(conf),<a name="line.221"></a>
+<span class="sourceLineNo">222</span>        family.isBlockCacheEnabled(),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        family.isInMemory(),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        // For the following flags we enable them regardless of per-schema settings<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // if they are enabled in the global configuration.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>            DEFAULT_CACHE_DATA_ON_WRITE) || family.isCacheDataOnWrite(),<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY,<a name="line.228"></a>
+<span class="sourceLineNo">229</span>            DEFAULT_CACHE_INDEXES_ON_WRITE) || family.isCacheIndexesOnWrite(),<a name="line.229"></a>
+<span class="sourceLineNo">230</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY,<a name="line.230"></a>
+<span class="sourceLineNo">231</span>            DEFAULT_CACHE_BLOOMS_ON_WRITE) || family.isCacheBloomsOnWrite(),<a name="line.231"></a>
+<span class="sourceLineNo">232</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY,<a name="line.232"></a>
+<span class="sourceLineNo">233</span>            DEFAULT_EVICT_ON_CLOSE) || family.isEvictBlocksOnClose(),<a name="line.233"></a>
+<span class="sourceLineNo">234</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.234"></a>
+<span class="sourceLineNo">235</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY,<a name="line.235"></a>
+<span class="sourceLineNo">236</span>            DEFAULT_PREFETCH_ON_OPEN) || family.isPrefetchBlocksOnOpen(),<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        conf.getBoolean(HColumnDescriptor.CACHE_DATA_IN_L1,<a name="line.237"></a>
+<span class="sourceLineNo">238</span>            HColumnDescriptor.DEFAULT_CACHE_DATA_IN_L1) || family.isCacheDataInL1(),<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY,DROP_BEHIND_CACHE_COMPACTION_DEFAULT)<a name="line.239"></a>
+<span class="sourceLineNo">240</span>     );<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /**<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * Create a cache configuration using the specified configuration object and<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * defaults for family level settings.<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   * @param conf hbase configuration<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   */<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  public CacheConfig(Configuration conf) {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    this(CacheConfig.instantiateBlockCache(conf),<a name="line.249"></a>
+<span class="sourceLineNo">250</span>        DEFAULT_CACHE_DATA_ON_READ,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>        DEFAULT_IN_MEMORY, // This is a family-level setting so can't be set<a name="line.251"></a>
+<span class="sourceLineNo">252</span>                           // strictly from conf<a name="line.252"></a>
+<span class="sourceLineNo">253</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE),<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE),<a name="line.254"></a>
+<span class="sourceLineNo">255</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE),<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE),<a name="line.256"></a>
+<span class="sourceLineNo">257</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.257"></a>
+<span class="sourceLineNo">258</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        conf.getBoolean(HColumnDescriptor.CACHE_DATA_IN_L1,<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          HColumnDescriptor.DEFAULT_CACHE_DATA_IN_L1),<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY,DROP_BEHIND_CACHE_COMPACTION_DEFAULT)<a name="line.261"></a>
+<span class="sourceLineNo">262</span>     );<a name="line.262"></a>
+<span class="sourceLineNo">263</span>  }<a name="line.263"></a>
+<span class="sourceLineNo">264</span><a name="line.264"></a>
+<span class="sourceLineNo">265</span>  /**<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   * Create a block cache configuration with the specified cache and<a name="line.266"></a>
+<span class="sourceLineNo">267</span>   * configuration parameters.<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * @param blockCache reference to block cache, null if completely disabled<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   * @param cacheDataOnRead whether DATA blocks should be cached on read (we always cache INDEX<a name="line.269"></a>
+<span class="sourceLineNo">270</span>   * blocks and BLOOM blocks; this cannot be disabled).<a name="line.270"></a>
+<span class="sourceLineNo">271</span>   * @param inMemory whether blocks should be flagged as in-memory<a name="line.271"></a>
+<span class="sourceLineNo">272</span>   * @param cacheDataOnWrite whether data blocks should be cached on write<a name="line.272"></a>
+<span class="sourceLineNo">273</span>   * @param cacheIndexesOnWrite whether index blocks should be cached on write<a name="line.273"></a>
+<span class="sourceLineNo">274</span>   * @param cacheBloomsOnWrite whether blooms should be cached on write<a name="line.274"></a>
+<span class="sourceLineNo">275</span>   * @param evictOnClose whether blocks should be evicted when HFile is closed<a name="line.275"></a>
+<span class="sourceLineNo">276</span>   * @param cacheDataCompressed whether to store blocks as compressed in the cache<a name="line.276"></a>
+<span class="sourceLineNo">277</span>   * @param prefetchOnOpen whether to prefetch blocks upon open<a name="line.277"></a>
+<span class="sourceLineNo">278</span>   * @param cacheDataInL1 If more than one cache tier deployed, if true, cache this column families<a name="line.278"></a>
+<span class="sourceLineNo">279</span>   * data blocks up in the L1 tier.<a name="line.279"></a>
+<span class="sourceLineNo">280</span>   */<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  CacheConfig(final BlockCache blockCache,<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      final boolean cacheDataOnRead, final boolean inMemory,<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      final boolean cacheBloomsOnWrite, final boolean evictOnClose,<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      final boolean cacheDataCompressed, final boolean prefetchOnOpen,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      final boolean cacheDataInL1, final boolean dropBehindCompaction) {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    this.blockCache = blockCache;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    this.cacheDataOnRead = cacheDataOnRead;<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    this.inMemory = inMemory;<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    this.cacheIndexesOnWrite = cacheIndexesOnWrite;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.cacheBloomsOnWrite = cacheBloomsOnWrite;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    this.evictOnClose = evictOnClose;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    this.cacheDataCompressed = cacheDataCompressed;<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    this.prefetchOnOpen = prefetchOnOpen;<a name="line.295"></a>
+<span class="sourceLineNo">296</span>    this.cacheDataInL1 = cacheDataInL1;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    this.dropBehindCompaction = dropBehindCompaction;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    LOG.info(this);<a name="line.298"></a>
+<span class="sourceLineNo">299</span>  }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>  /**<a name="line.301"></a>
+<span class="sourceLineNo">302</span>   * Constructs a cache configuration copied from the specified configuration.<a name="line.302"></a>
+<span class="sourceLineNo">303</span>   * @param cacheConf<a name="line.303"></a>
+<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
+<span class="sourceLineNo">305</span>  public CacheConfig(CacheConfig cacheConf) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    this(cacheConf.blockCache, cacheConf.cacheDataOnRead, cacheConf.inMemory,<a name="line.306"></a>
+<span class="sourceLineNo">307</span>        cacheConf.cacheDataOnWrite, cacheConf.cacheIndexesOnWrite,<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        cacheConf.cacheBloomsOnWrite, cacheConf.evictOnClose,<a name="line.308"></a>
+<span class="sourceLineNo">309</span>        cacheConf.cacheDataCompressed, cacheConf.prefetchOnOpen,<a name="line.309"></a>
+<span class="sourceLineNo">310</span>        cacheConf.cacheDataInL1, cacheConf.dropBehindCompaction);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Checks whether the block cache is enabled.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   */<a name="line.315"></a>
+<span class="sourceLineNo">316</span>  public boolean isBlockCacheEnabled() {<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    return this.blockCache != null;<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  }<a name="line.318"></a>
+<span class="sourceLineNo">319</span><a name="line.319"></a>
+<span class="sourceLineNo">320</span>  /**<a name="line.320"></a>
+<span class="sourceLineNo">321</span>   * Returns the block cache.<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * @return the block cache, or null if caching is completely disabled<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
+<span class="sourceLineNo">324</span>  public BlockCache getBlockCache() {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return this.blockCache;<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Returns whether the DATA blocks of this HFile should be cached on read or not (we always<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * cache the meta blocks, the INDEX and BLOOM blocks).<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * @return true if blocks should be cached on read, false if not<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   */<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public boolean shouldCacheDataOnRead() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    return isBlockCacheEnabled() &amp;&amp; cacheDataOnRead;<a name="line.334"></a>
+<span class="sourceLineNo">335</span>  }<a name="line.335"></a>
+<span class="sourceLineNo">336</span><a name="line.336"></a>
+<span class="sourceLineNo">337</span>  public boolean shouldDropBehindCompaction() {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    return dropBehindCompaction;<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
+<span class="sourceLineNo">340</span><a name="line.340"></a>
+<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * Should we cache a block of a particular category? We always cache<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * important blocks such as index blocks, as long as the block cache is<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * available.<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   */<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  public boolean shouldCacheBlockOnRead(BlockCategory category) {<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    return isBlockCacheEnabled()<a name="line.347"></a>
+<span class="sourceLineNo">348</span>        &amp;&amp; (cacheDataOnRead ||<a name="line.348"></a>
+<span class="sourceLineNo">349</span>            category == BlockCategory.INDEX ||<a name="line.349"></a>
+<span class="sourceLineNo">350</span>            category == BlockCategory.BLOOM ||<a name="line.350"></a>
+<span class="sourceLineNo">351</span>            (prefetchOnOpen &amp;&amp;<a name="line.351"></a>
+<span class="sourceLineNo">352</span>                (category != BlockCategory.META &amp;&amp;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>                 category != BlockCategory.UNKNOWN)));<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  }<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>  /**<a name="line.356"></a>
+<span class="sourceLineNo">357</span>   * @return true if blocks in this file should be flagged as in-memory<a name="line.357"></a>
+<span class="sourceLineNo">358</span>   */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  public boolean isInMemory() {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    return isBlockCacheEnabled() &amp;&amp; this.inMemory;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>  }<a name="line.361"></a>
+<span class="sourceLineNo">362</span><a name="line.362"></a>
+<span class="sourceLineNo">363</span>  /**<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * @return True if cache data blocks in L1 tier (if more than one tier in block cache deploy).<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   */<a name="line.365"></a>
+<span class="sourceLineNo">366</span>  public boolean isCacheDataInL1() {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataInL1;<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  }<a name="line.368"></a>
+<span class="sourceLineNo">369</span><a name="line.369"></a>
+<span class="sourceLineNo">370</span>  /**<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   * @return true if data blocks should be written to the cache when an HFile is<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   *         written, false if not<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  public boolean shouldCacheDataOnWrite() {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnWrite;<a name="line.375"></a>
+<span class="sourceLineNo">376</span>  }<a name="line.376"></a>
+<span class="sourceLineNo">377</span><a name="line.377"></a>
+<span class="sourceLineNo">378</span>  /**<a name="line.378"></a>
+<span class="sourceLineNo">379</span>   * Only used for testing.<a name="line.379"></a>
+<span class="sourceLineNo">380</span>   * @param cacheDataOnWrite whether data blocks should be written to the cache<a name="line.380"></a>
+<span class="sourceLineNo">381</span>   *                         when an HFile is written<a name="line.381"></a>
+<span class="sourceLineNo">382</span>   */<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  @VisibleForTesting<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  public void setCacheDataOnWrite(boolean cacheDataOnWrite) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  }<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * Only used for testing.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @param cacheDataInL1 Whether to cache data blocks up in l1 (if a multi-tier cache<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   * implementation).<a name="line.391"></a>
+<span class="sourceLineNo">392</span>   */<a name="line.392"></a>
+<span class="sourceLineNo">393</span>  @VisibleForTesting<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  public void setCacheDataInL1(boolean cacheDataInL1) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    this.cacheDataInL1 = cacheDataInL1;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   * @return true if index blocks should be written to the cache when an HFile<a name="line.399"></a>
+<span class="sourceLineNo">400</span>   *         is written, false if not<a name="line.400"></a>
+<span class="sourceLineNo">401</span>   */<a name="line.401"></a>
+<span class="sourceLineNo">402</span>  public boolean shouldCacheIndexesOnWrite() {<a name="line.402"></a>
+<span class="sourceLineNo">403</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheIndexesOnWrite;<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  }<a name="line.404"></a>
+<span class="sourceLineNo">405</span><a name="line.405"></a>
+<span class="sourceLineNo">406</span>  /**<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   * @return true if bloom blocks should be written to the cache when an HFile<a name="line.407"></a>
+<span class="sourceLineNo">408</span>   *         is written, false if not<a name="line.408"></a>
+<span class="sourceLineNo">409</span>   */<a name="line.409"></a>
+<span class="sourceLineNo">410</span>  public boolean shouldCacheBloomsOnWrite() {<a name="line.410"></a>
+<span class="sourceLineNo">411</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheBloomsOnWrite;<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
+<span class="sourceLineNo">413</span><a name="line.413"></a>
+<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * @return true if blocks should be evicted from the cache when an HFile<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   *         reader is closed, false if not<a name="line.416"></a>
+<span class="sourceLineNo">417</span>   */<a name="line.417"></a>
+<span class="sourceLineNo">418</span>  public boolean shouldEvictOnClose() {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    return isBlockCacheEnabled() &amp;&amp; this.evictOnClose;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Only used for testing.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param evictOnClose whether blocks should be evicted from the cache when an<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   *                     HFile reader is closed<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   */<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  public void setEvictOnClose(boolean evictOnClose) {<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    this.evictOnClose = evictOnClose;<a name="line.428"></a>
+<span class="sourceLineNo">429</span>  }<a name="line.429"></a>
+<span class="sourceLineNo">430</span><a name="line.430"></a>
+<span class="sourceLineNo">431</span>  /**<a name="line.431"></a>
+<span class="sourceLineNo">432</span>   * @return true if data blocks should be compressed in the cache, false if not<a name="line.432"></a>
+<span class="sourceLineNo">433</span>   */<a name="line.433"></a>
+<span class="sourceLineNo">434</span>  public boolean shouldCacheDataCompressed() {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataCompressed;<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  }<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>  /**<a name="line.438"></a>
+<span class="sourceLineNo">439</span>   * @return true if this {@link BlockCategory} should be compressed in blockcache, false otherwise<a name="line.439"></a>
+<span class="sourceLineNo">440</span>   */<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  public boolean shouldCacheCompressed(BlockCategory category) {<a name="line.441"></a>
+<span class="sourceLineNo">442</span>    if (!isBlockCacheEnabled()) return false;<a name="line.442"></a>
+<span class="sourceLineNo">443</span>    switch (category) {<a name="line.443"></a>
+<span class="sourceLineNo">444</span>      case DATA:<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        return this.cacheDataCompressed;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      default:<a name="line.446"></a>
+<span class="sourceLineNo">447</span>        return false;<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    }<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  }<a name="line.449"></a>
+<span class="sourceLineNo">450</span><a name="line.450"></a>
+<span class="sourceLineNo">451</span>  /**<a name="line.451"></a>
+<span class="sourceLineNo">452</span>   * @return true if blocks should be prefetched into the cache on open, false if not<a name="line.452"></a>
+<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  public boolean shouldPrefetchOnOpen() {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>    return isBlockCacheEnabled() &amp;&amp; this.prefetchOnOpen;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>  }<a name="line.456"></a>
+<span class="sourceLineNo">457</span><a name="line.457"></a>
+<span class="sourceLineNo">458</span>  /**<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   * Return true if we may find this type of block in block cache.<a name="line.459"></a>
+<span class="sourceLineNo">460</span>   * &lt;p&gt;<a name="line.460"></a>
+<span class="sourceLineNo">461</span>   * TODO: today {@code family.isBlockCacheEnabled()} only means {@code cacheDataOnRead}, so here we<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   * consider lots of other configurations such as {@code cacheDataOnWrite}. We should fix this in<a name="line.462"></a>
+<span class="sourceLineNo">463</span>   * the future, {@code cacheDataOnWrite} should honor the CF level {@code isBlockCacheEnabled}<a name="line.463"></a>
+<span class="sourceLineNo">464</span>   * configuration.<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   */<a name="line.465"></a>
+<span class="sourceLineNo">466</span>  public boolean shouldReadBlockFromCache(BlockType blockType) {<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    if (!isBlockCacheEnabled()) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>      return false;<a name="line.468"></a>
 <span class="sourceLineNo">469</span>    }<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    if (cacheDataOnWrite) {<a name="line.470"></a>
+<span class="sourceLineNo">470</span>    if (cacheDataOnRead) {<a name="line.470"></a>
 <span class="sourceLineNo">471</span>      return true;<a name="line.471"></a>
 <span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    if (blockType == null) {<a name="line.473"></a>
+<span class="sourceLineNo">473</span>    if (prefetchOnOpen) {<a name="line.473"></a>
 <span class="sourceLineNo">474</span>      return true;<a name="line.474"></a>
 <span class="sourceLineNo">475</span>    }<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    if (blockType.getCategory() == BlockCategory.BLOOM ||<a name="line.476"></a>
-<span class="sourceLineNo">477</span>            blockType.getCategory() == BlockCategory.INDEX) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>      return true;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    return false;<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  }<a name="line.481"></a>
-<span class="sourceLineNo">482</span><a name="line.482"></a>
-<span class="sourceLineNo">483</span>  /**<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * If we make sure the block could not be cached, we will not acquire the lock<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * otherwise we will acquire lock<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   */<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  public boolean shouldLockOnCacheMiss(BlockType blockType) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    if (blockType == null) {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      return true;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    }<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    return shouldCacheBlockOnRead(blockType.getCategory());<a name="line.491"></a>
-<span class="sourceLineNo">492</span>  }<a name="line.492"></a>
-<span class="sourceLineNo">493</span><a name="line.493"></a>
-<span class="sourceLineNo">494</span>  @Override<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public String toString() {<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    if (!isBlockCacheEnabled()) {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      return "CacheConfig:disabled";<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    }<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    return "blockCache=" + getBlockCache() +<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      ", cacheDataOnRead=" + shouldCacheDataOnRead() +<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      ", cacheDataOnWrite=" + shouldCacheDataOnWrite() +<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      ", cacheIndexesOnWrite=" + shouldCacheIndexesOnWrite() +<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      ", cacheBloomsOnWrite=" + shouldCacheBloomsOnWrite() +<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      ", cacheEvictOnClose=" + shouldEvictOnClose() +<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      ", cacheDataCompressed=" + shouldCacheDataCompressed() +<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      ", prefetchOnOpen=" + shouldPrefetchOnOpen();<a name="line.506"></a>
-<span class="sourceLineNo">507</span>  }<a name="line.507"></a>
-<span class="sourceLineNo">508</span><a name="line.508"></a>
-<span class="sourceLineNo">509</span>  // Static block cache reference and methods<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>  /**<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Static reference to the block cache, or null if no caching should be used<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * at all.<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   */<a name="line.514"></a>
-<span class="sourceLineNo">515</span>  // Clear this if in tests you'd make more than one block cache instance.<a name="line.515"></a>
-<span class="sourceLineNo">516</span>  @VisibleForTesting<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  static BlockCache GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.517"></a>
-<span class="sourceLineNo">518</span><a name="line.518"></a>
-<span class="sourceLineNo">519</span>  /** Boolean whether we have disabled the block cache entirely. */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  @VisibleForTesting<a name="line.520"></a>
-<span class="sourceLineNo">521</span>  static boolean blockCacheDisabled = false;<a name="line.521"></a>
-<span class="sourceLineNo">522</span><a name="line.522"></a>
-<span class="sourceLineNo">523</span>  static long getLruCacheSize(final Configuration conf, final MemoryUsage mu) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    float cachePercentage = conf.getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY,<a name="line.524"></a>
-<span class="sourceLineNo">525</span>      HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT);<a name="line.525"></a>
-<span class="sourceLineNo">526</span>    if (cachePercentage &lt;= 0.0001f) {<a name="line.526"></a>
-<span class="sourceLineNo">527</span>      blockCacheDisabled = true;<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return -1;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (cachePercentage &gt; 1.0) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new IllegalArgumentException(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY +<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        " must be between 0.0 and 1.0, and not &gt; 1.0");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span><a name="line.534"></a>
-<span class="sourceLineNo">535</span>    // Calculate the amount of heap to give the heap.<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    return (long) (mu.getMax() * cachePercentage);<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  }<a name="line.537"></a>
-<span class="sourceLineNo">538</span><a name="line.538"></a>
-<span class="sourceLineNo">539</span>  /**<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @param c Configuration to use.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   * @param mu JMX Memory Bean<a name="line.541"></a>
-<span class="sourceLineNo">542</span>   * @return An L1 instance.  Currently an instance of LruBlockCache.<a name="line.542"></a>
-<span class="sourceLineNo">543</span>   */<a name="line.543"></a>
-<span class="sourceLineNo">544</span>  private static LruBlockCache getL1(final Configuration c, final MemoryUsage mu) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    long lruCacheSize = getLruCacheSize(c, mu);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    if (lruCacheSize &lt; 0) return null;<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    LOG.info("Allocating LruBlockCache size=" +<a name="line.548"></a>
-<span class="sourceLineNo">549</span>      StringUtils.byteDesc(lruCacheSize) + ", blockSize=" + StringUtils.byteDesc(blockSize));<a name="line.549"></a>
-<span class="sourceLineNo">550</span>    return new LruBlockCache(lruCacheSize, blockSize, true, c);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>  }<a name="line.551"></a>
-<span class="sourceLineNo">552</span><a name="line.552"></a>
-<span class="sourceLineNo">553</span>  /**<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * @param c Configuration to use.<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @param mu JMX Memory Bean<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   * @return Returns L2 block cache instance (for now it is BucketCache BlockCache all the time)<a name="line.556"></a>
-<span class="sourceLineNo">557</span>   * or null if not supposed to be a L2.<a name="line.557"></a>
-<span class="sourceLineNo">558</span>   */<a name="line.558"></a>
-<span class="sourceLineNo">559</span>  private static BlockCache getL2(final Configuration c, final MemoryUsage mu) {<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    final boolean useExternal = c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    if (LOG.isDebugEnabled()) {<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      LOG.debug("Trying to use " + (useExternal?" External":" Internal") + " l2 cache");<a name="line.562"></a>
-<span class="sourceLineNo">563</span>    }<a name="line.563"></a>
-<span class="sourceLineNo">564</span><a name="line.564"></a>
-<span class="sourceLineNo">565</span>    // If we want to use an external block cache then create that.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    if (useExternal) {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      return getExternalBlockcache(c);<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>    // otherwise use the bucket cache.<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    return getBucketCache(c, mu);<a name="line.571"></a>
-<span class="sourceLineNo">572</span><a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  private static BlockCache getExternalBlockcache(Configuration c) {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    Class klass = null;<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // Get the class, from the config. s<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    try {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      klass = ExternalBlockCaches.valueOf(c.get(EXTERNAL_BLOCKCACHE_CLASS_KEY, "memcache")).clazz;<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    } catch (IllegalArgumentException exception) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      try {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        klass = c.getClass(EXTERNAL_BLOCKCACHE_CLASS_KEY, Class.forName(<a name="line.583"></a>
-<span class="sourceLineNo">584</span>            "org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache"));<a name="line.584"></a>
-<span class="sourceLineNo">585</span>      } catch (ClassNotFoundException e) {<a name="line.585"></a>
-<span class="sourceLineNo">586</span>        return null;<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      }<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    }<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>    // Now try and create an instance of the block cache.<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    try {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      LOG.info("Crea

<TRUNCATED>
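
The CacheConfig excerpt above is essentially a bundle of boolean switches that the read and write paths consult. As a rough illustration of how the accessors shown combine, here is a minimal sketch; it is not part of this commit, and the CacheConfig(Configuration) constructor and the example heap figures are assumptions taken from the surrounding HBase code base rather than from this excerpt:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;

public class CacheConfigProbe {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // hfile.block.cache.size (HConstants.HFILE_BLOCK_CACHE_SIZE_KEY) sizes the L1 cache:
    // getLruCacheSize() above returns (long) (maxHeap * cachePercentage), so a 4 GB heap
    // with a value of 0.4 yields an LruBlockCache of roughly 1.6 GB.
    CacheConfig cacheConf = new CacheConfig(conf); // assumed constructor, not shown in this excerpt

    // DATA blocks are cached on read only when cacheDataOnRead (or prefetchOnOpen) is set...
    boolean cacheData = cacheConf.shouldCacheBlockOnRead(BlockCategory.DATA);
    // ...whereas INDEX and BLOOM blocks are cached whenever a block cache is configured at all.
    boolean cacheIndex = cacheConf.shouldCacheBlockOnRead(BlockCategory.INDEX);

    System.out.println("blockCacheEnabled=" + cacheConf.isBlockCacheEnabled()
        + " cacheDataOnRead=" + cacheData
        + " cacheIndexOnRead=" + cacheIndex);
  }
}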

[30/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html
index 8a9e4fa..413a698 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockType.html
@@ -290,14 +290,10 @@ the order they are declared.</div>
                                 <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>void</code></td>
-<td class="colLast"><span class="strong">HFileBlock.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#expectType(org.apache.hadoop.hbase.io.hfile.BlockType)">expectType</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;expectedType)</code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><code>private <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a></code></td>
 <td class="colLast"><span class="strong">HFileReaderImpl.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#getBloomFilterMetadata(org.apache.hadoop.hbase.io.hfile.BlockType)">getBloomFilterMetadata</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
 <td class="colLast"><span class="strong">HFileReaderImpl.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#getCachedBlock(org.apache.hadoop.hbase.io.hfile.BlockCacheKey,%20boolean,%20boolean,%20boolean,%20boolean,%20org.apache.hadoop.hbase.io.hfile.BlockType,%20org.apache.hadoop.hbase.io.encoding.DataBlockEncoding)">getCachedBlock</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
                             boolean&nbsp;cacheBlock,
@@ -309,14 +305,14 @@ the order they are declared.</div>
 <div class="block">Retrieve block from cache.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
 <td class="colLast"><span class="strong">HFileBlock.BlockIterator.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#nextBlockWithBlockType(org.apache.hadoop.hbase.io.hfile.BlockType)">nextBlockWithBlockType</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</code>
 <div class="block">Similar to <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#nextBlock()"><code>HFileBlock.BlockIterator.nextBlock()</code></a> but checks block type, throws an
  exception if incorrect, and returns the HFile block</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
 <td class="colLast"><span class="strong">HFile.CachingBlockReader.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html#readBlock(long,%20long,%20boolean,%20boolean,%20boolean,%20boolean,%20org.apache.hadoop.hbase.io.hfile.BlockType,%20org.apache.hadoop.hbase.io.encoding.DataBlockEncoding)">readBlock</a></strong>(long&nbsp;offset,
                   long&nbsp;onDiskBlockSize,
@@ -329,7 +325,7 @@ the order they are declared.</div>
 <div class="block">Read in a file block.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
 <td class="colLast"><span class="strong">HFileReaderImpl.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#readBlock(long,%20long,%20boolean,%20boolean,%20boolean,%20boolean,%20org.apache.hadoop.hbase.io.hfile.BlockType,%20org.apache.hadoop.hbase.io.encoding.DataBlockEncoding)">readBlock</a></strong>(long&nbsp;dataBlockOffset,
                   long&nbsp;onDiskBlockSize,
@@ -340,31 +336,31 @@ the order they are declared.</div>
                   <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;expectedBlockType,
                   <a href="../../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;expectedDataBlockEncoding)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="strong">HFileBlock.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#sanityCheckAssertion(org.apache.hadoop.hbase.io.hfile.BlockType,%20org.apache.hadoop.hbase.io.hfile.BlockType)">sanityCheckAssertion</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;valueFromBuf,
                                         <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;valueFromField)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><span class="strong">CacheConfig.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldLockOnCacheMiss(org.apache.hadoop.hbase.io.hfile.BlockType)">shouldLockOnCacheMiss</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</code>
 <div class="block">If we make sure the block could not be cached, we will not acquire the lock
  otherwise we will acquire lock</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><span class="strong">CacheConfig.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldReadBlockFromCache(org.apache.hadoop.hbase.io.hfile.BlockType)">shouldReadBlockFromCache</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</code>
 <div class="block">Return true if we may find this type of block in block cache.</div>
 </td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a></code></td>
+<tr class="altColor">
+<td class="colFirst"><code>(package private) <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a></code></td>
 <td class="colLast"><span class="strong">HFileBlock.Writer.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#startWriting(org.apache.hadoop.hbase.io.hfile.BlockType)">startWriting</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;newBlockType)</code>
 <div class="block">Starts writing into the block.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><span class="strong">HFileReaderImpl.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#validateBlockType(org.apache.hadoop.hbase.io.hfile.HFileBlock,%20org.apache.hadoop.hbase.io.hfile.BlockType)">validateBlockType</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;block,
                                   <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;expectedBlockType)</code>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
index d8ebb95..3942155 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
@@ -288,7 +288,7 @@
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
+<td class="colFirst"><code>(package private) <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
 <td class="colLast"><span class="strong">HFileBlock.Writer.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#getBlockForCaching(org.apache.hadoop.hbase.io.hfile.CacheConfig)">getBlockForCaching</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</code>
 <div class="block">Creates a new HFileBlock.</div>
 </td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
index 4c339b0..babe95d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
@@ -122,7 +122,7 @@
 <tr class="altColor">
 <td class="colFirst"><code>class&nbsp;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></strong></code>
-<div class="block">Reading <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and 2 blocks, and writing version 2 blocks.</div>
+<div class="block">Reads <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and version 2 blocks but writes version 2 blocks only.</div>
 </td>
 </tr>
 </tbody>
@@ -149,7 +149,9 @@
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code>(package private) static <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
-<td class="colLast"><span class="strong">HFileBlock.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#blockDeserializer">blockDeserializer</a></strong></code>&nbsp;</td>
+<td class="colLast"><span class="strong">HFileBlock.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#blockDeserializer">blockDeserializer</a></strong></code>
+<div class="block">Used deserializing blocks from Cache.</div>
+</td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>private static <a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&gt;</code></td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html
index 8376bf0..08f48f6 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheableDeserializer.html
@@ -106,7 +106,9 @@
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code>(package private) static <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;</code></td>
-<td class="colLast"><span class="strong">HFileBlock.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#blockDeserializer">blockDeserializer</a></strong></code>&nbsp;</td>
+<td class="colLast"><span class="strong">HFileBlock.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#blockDeserializer">blockDeserializer</a></strong></code>
+<div class="block">Used deserializing blocks from Cache.</div>
+</td>
 </tr>
 </tbody>
 </table>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockWritable.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockWritable.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockWritable.html
index 1758d2e..fe17fe4 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockWritable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.BlockWritable.html
@@ -113,7 +113,7 @@
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code>void</code></td>
+<td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><span class="strong">HFileBlock.Writer.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#writeBlock(org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable,%20org.apache.hadoop.fs.FSDataOutputStream)">writeBlock</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockWritable</a>&nbsp;bw,
                     org.apache.hadoop.fs.FSDataOutputStream&nbsp;out)</code>
 <div class="block">Takes the given <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock.BlockWritable</code></a> instance, creates a new block of

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
index 1061d30..799958b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.Writer.html
@@ -103,7 +103,7 @@
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer</a></code></td>
-<td class="colLast"><span class="strong">HFileWriterImpl.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#fsBlockWriter">fsBlockWriter</a></strong></code>
+<td class="colLast"><span class="strong">HFileWriterImpl.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#blockWriter">blockWriter</a></strong></code>
 <div class="block">block writer</div>
 </td>
 </tr>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html
index 6d4d805..4d1e3ec 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html
@@ -139,7 +139,7 @@
 <td class="colLast"><span class="strong">MemcachedBlockCache.HFileBlockTranscoder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.HFileBlockTranscoder.html#decode(net.spy.memcached.CachedData)">decode</a></strong>(net.spy.memcached.CachedData&nbsp;d)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
+<td class="colFirst"><code>(package private) <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
 <td class="colLast"><span class="strong">HFileBlock.Writer.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#getBlockForCaching(org.apache.hadoop.hbase.io.hfile.CacheConfig)">getBlockForCaching</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</code>
 <div class="block">Creates a new HFileBlock.</div>
 </td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
index 6304015..0d56ead 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileContext.html
@@ -316,7 +316,7 @@
 <td class="colLast"><span class="strong">HFileReaderImpl.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#getFileContext()">getFileContext</a></strong>()</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a></code></td>
+<td class="colFirst"><code>(package private) <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a></code></td>
 <td class="colLast"><span class="strong">HFileBlock.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getHFileContext()">getHFileContext</a></strong>()</code>&nbsp;</td>
 </tr>
 </tbody>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/package-summary.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-summary.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-summary.html
index 2af0446..f411ecb 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-summary.html
@@ -308,7 +308,7 @@
 <tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></td>
 <td class="colLast">
-<div class="block">Reading <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and 2 blocks, and writing version 2 blocks.</div>
+<div class="block">Reads <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and version 2 blocks but writes version 2 blocks only.</div>
 </td>
 </tr>
 <tr class="rowColor">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index be4d48b..14a3305 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -224,6 +224,7 @@
 <ul>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFile.Reader</span></a> (also extends org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.CachingBlockReader</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Writer.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFile.Writer</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFileScanner</span></a> (also extends org.apache.hadoop.hbase.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a>)</li>
 </ul>
 </li>
 </ul>
@@ -260,7 +261,7 @@
 </li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver"><span class="strong">Shipper</span></a>
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFileScanner</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFileScanner</span></a> (also extends java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>)</li>
 </ul>
 </li>
 </ul>
@@ -271,9 +272,9 @@
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="strong">BlockType.BlockCategory</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="strong">BlockType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFileBlock.Writer.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="strong">BlockPriority</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFileBlock.Writer.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="strong">BlockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="strong">CacheConfig.ExternalBlockCaches</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="strong">Cacheable.MemoryType</span></a></li>
 </ul>
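
The package-tree change above also records that HFileScanner now extends java.io.Closeable in addition to Shipper. Assuming the usual getScanner(boolean, boolean), seekTo(), next() and getCell() signatures (they are not part of this diff), one consequence is that a scanner can be managed with try-with-resources, as in this sketch:

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;

final class ScanAllCells {
  // Counts the cells of an already-opened HFile.Reader; a minimal sketch, not HBase code.
  static long countCells(HFile.Reader reader) throws IOException {
    long cells = 0;
    // HFileScanner is Closeable after this change, so try-with-resources releases it.
    try (HFileScanner scanner = reader.getScanner(true, true)) {
      if (scanner.seekTo()) {            // position at the first cell, if the file is non-empty
        do {
          Cell cell = scanner.getCell(); // current cell; real code would process it here
          if (cell != null) {
            cells++;
          }
        } while (scanner.next());
      }
    }
    return cells;
  }
}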

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/package-use.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-use.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-use.html
index 4a27598..1cc343d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-use.html
@@ -352,7 +352,7 @@
 </tr>
 <tr class="altColor">
 <td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.html#org.apache.hadoop.hbase.io.hfile">HFileBlock</a>
-<div class="block">Reading <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and 2 blocks, and writing version 2 blocks.</div>
+<div class="block">Reads <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and version 2 blocks but writes version 2 blocks only.</div>
 </td>
 </tr>
 <tr class="rowColor">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
index 60d1885..236e3be 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
@@ -106,7 +106,7 @@
 <br>
 <pre><a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Public.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Public</a>
 <a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceStability.Stable.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceStability.Stable</a>
-public abstract class <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.101">TableInputFormatBase</a>
+public abstract class <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.100">TableInputFormatBase</a>
 extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a>,<a href="../../../../../org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a>&gt;</pre>
 <div class="block">A base for <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableInputFormat.html" title="class in org.apache.hadoop.hbase.mapreduce"><code>TableInputFormat</code></a>s. Receives a <a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client"><code>Connection</code></a>, a <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase"><code>TableName</code></a>,
  a <a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client"><code>Scan</code></a> instance that defines the input columns etc. Subclasses may use
@@ -403,7 +403,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>MAPREDUCE_INPUT_AUTOBALANCE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.105">MAPREDUCE_INPUT_AUTOBALANCE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.104">MAPREDUCE_INPUT_AUTOBALANCE</a></pre>
 <div class="block">Specify whether to enable auto-balance for input in M/R jobs.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.MAPREDUCE_INPUT_AUTOBALANCE">Constant Field Values</a></dd></dl>
 </li>
@@ -414,7 +414,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>INPUT_AUTOBALANCE_MAXSKEWRATIO</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.108">INPUT_AUTOBALANCE_MAXSKEWRATIO</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.107">INPUT_AUTOBALANCE_MAXSKEWRATIO</a></pre>
 <div class="block">Specify the maximum skew ratio for input data in M/R jobs; it works together with the
  hbase.mapreduce.input.autobalance property.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.INPUT_AUTOBALANCE_MAXSKEWRATIO">Constant Field Values</a></dd></dl>
@@ -426,7 +426,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE_ROW_TEXTKEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.112">TABLE_ROW_TEXTKEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.111">TABLE_ROW_TEXTKEY</a></pre>
 <div class="block">Specify whether the row key in the table is text (ASCII between 32~126);
  the default is true. False means the table uses binary row keys.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.TABLE_ROW_TEXTKEY">Constant Field Values</a></dd></dl>
@@ -438,7 +438,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.114">LOG</a></pre>
+<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.113">LOG</a></pre>
 </li>
 </ul>
 <a name="NOT_INITIALIZED">
@@ -447,7 +447,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_INITIALIZED</h4>
-<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.116">NOT_INITIALIZED</a></pre>
+<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.115">NOT_INITIALIZED</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.NOT_INITIALIZED">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -457,7 +457,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>INITIALIZATION_ERROR</h4>
-<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.119">INITIALIZATION_ERROR</a></pre>
+<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.118">INITIALIZATION_ERROR</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.INITIALIZATION_ERROR">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -467,7 +467,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>scan</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.126">scan</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.125">scan</a></pre>
 <div class="block">Holds the details for the internal scanner.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client"><code>Scan</code></a></dd></dl>
 </li>
@@ -478,7 +478,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>admin</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.128">admin</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.127">admin</a></pre>
 <div class="block">The <a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client"><code>Admin</code></a>.</div>
 </li>
 </ul>
@@ -488,7 +488,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>table</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.130">table</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.129">table</a></pre>
 <div class="block">The <a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client"><code>Table</code></a> to scan.</div>
 </li>
 </ul>
@@ -498,7 +498,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>regionLocator</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.132">regionLocator</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.131">regionLocator</a></pre>
 <div class="block">The <a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client"><code>RegionLocator</code></a> of the table.</div>
 </li>
 </ul>
@@ -508,7 +508,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>tableRecordReader</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce">TableRecordReader</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.134">tableRecordReader</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce">TableRecordReader</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.133">tableRecordReader</a></pre>
 <div class="block">The reader scanning the table; it can be a custom one.</div>
 </li>
 </ul>
@@ -518,7 +518,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>connection</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.136">connection</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.135">connection</a></pre>
 <div class="block">The underlying <a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client"><code>Connection</code></a> of the table.</div>
 </li>
 </ul>
@@ -528,7 +528,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockListLast">
 <li class="blockList">
 <h4>reverseDNSCacheMap</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/net/InetAddress.html?is-external=true" title="class or interface in java.net">InetAddress</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.140">reverseDNSCacheMap</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/HashMap.html?is-external=true" title="class or interface in java.util">HashMap</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/net/InetAddress.html?is-external=true" title="class or interface in java.net">InetAddress</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.139">reverseDNSCacheMap</a></pre>
 <div class="block">The reverse DNS lookup cache mapping: IPAddress => HostName</div>
 </li>
 </ul>
@@ -546,7 +546,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TableInputFormatBase</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.101">TableInputFormatBase</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.100">TableInputFormatBase</a>()</pre>
 </li>
 </ul>
 </li>
@@ -563,7 +563,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>createRecordReader</h4>
-<pre>public&nbsp;org.apache.hadoop.mapreduce.RecordReader&lt;<a href="../../../../../org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a>,<a href="../../../../../org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.156">createRecordReader</a>(org.apache.hadoop.mapreduce.InputSplit&nbsp;split,
+<pre>public&nbsp;org.apache.hadoop.mapreduce.RecordReader&lt;<a href="../../../../../org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a>,<a href="../../../../../org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.155">createRecordReader</a>(org.apache.hadoop.mapreduce.InputSplit&nbsp;split,
                                                                                          org.apache.hadoop.mapreduce.TaskAttemptContext&nbsp;context)
                                                                                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Builds a <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce"><code>TableRecordReader</code></a>. If no <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce"><code>TableRecordReader</code></a> was provided, uses
@@ -585,7 +585,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>getStartEndKeys</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;byte[][],byte[][]&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.217">getStartEndKeys</a>()
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;byte[][],byte[][]&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.216">getStartEndKeys</a>()
                                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
@@ -597,7 +597,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>getSplits</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.mapreduce.InputSplit&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.232">getSplits</a>(org.apache.hadoop.mapreduce.JobContext&nbsp;context)
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.mapreduce.InputSplit&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.231">getSplits</a>(org.apache.hadoop.mapreduce.JobContext&nbsp;context)
                                                        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Calculates the splits that will serve as input for the map tasks. The
  number of splits matches the number of regions in a table.</div>
@@ -617,7 +617,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>reverseDNS</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.340">reverseDNS</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/net/InetAddress.html?is-external=true" title="class or interface in java.net">InetAddress</a>&nbsp;ipAddress)
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.339">reverseDNS</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/net/InetAddress.html?is-external=true" title="class or interface in java.net">InetAddress</a>&nbsp;ipAddress)
             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/net/UnknownHostException.html?is-external=true" title="class or interface in java.net">UnknownHostException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/net/UnknownHostException.html?is-external=true" title="class or interface in java.net">UnknownHostException</a></code></dd></dl>
@@ -629,7 +629,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <ul class="blockList">
 <li class="blockList">
 <h4>calculateRebalancedSplits</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.mapreduce.InputSplit&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.372">calculateRebalancedSplits</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.mapreduce.InputSplit&gt;&nbsp;list,
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.mapreduce.InputSplit&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.371">calculateRebalancedSplits</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.mapreduce.InputSplit&gt;&nbsp;list,
                                                                      org.apache.hadoop.mapreduce.JobContext&nbsp;context,
                                                                      long&nbsp;average)
                                                                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -650,7 +650,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;<a href="../../../../../org/a
 <li class="blockList">
 <h4>getSplitKey</h4>
 <pre><a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Private</a>
-public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.447">getSplitKey</a>(byte[]&nbsp;start,
+public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.446">getSplitKey</a>(byte[]&nbsp;start,
                                            byte[]&nbsp;end,
                                            boolean&nbsp;isText)</pre>
 <div class="block">Select a split point in the region. The selection of the split point is based on a uniform
@@ -672,7 +672,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockList">
 <li class="blockList">
 <h4>includeRegionInSplit</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.545">includeRegionInSplit</a>(byte[]&nbsp;startKey,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.544">includeRegionInSplit</a>(byte[]&nbsp;startKey,
                            byte[]&nbsp;endKey)</pre>
 <div class="block">Test if the given region is to be included in the InputSplit while splitting
  the regions of a table.
@@ -696,7 +696,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionLocator</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.552">getRegionLocator</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.551">getRegionLocator</a>()</pre>
 <div class="block">Allows subclasses to get the <a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client"><code>RegionLocator</code></a>.</div>
 </li>
 </ul>
@@ -706,7 +706,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockList">
 <li class="blockList">
 <h4>getTable</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.562">getTable</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.561">getTable</a>()</pre>
 <div class="block">Allows subclasses to get the <a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client"><code>Table</code></a>.</div>
 </li>
 </ul>
@@ -716,7 +716,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockList">
 <li class="blockList">
 <h4>getAdmin</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.572">getAdmin</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.571">getAdmin</a>()</pre>
 <div class="block">Allows subclasses to get the <a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client"><code>Admin</code></a>.</div>
 </li>
 </ul>
@@ -726,7 +726,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockList">
 <li class="blockList">
 <h4>initializeTable</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.586">initializeTable</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;connection,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.585">initializeTable</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;connection,
                    <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Allows subclasses to initialize the table information.</div>
@@ -741,7 +741,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockList">
 <li class="blockList">
 <h4>getScan</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.602">getScan</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.601">getScan</a>()</pre>
 <div class="block">Gets the scan defining the actual details like columns etc.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>The internal scan instance.</dd></dl>
 </li>
@@ -752,7 +752,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockList">
 <li class="blockList">
 <h4>setScan</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.612">setScan</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.611">setScan</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan)</pre>
 <div class="block">Sets the scan defining the actual details like columns etc.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>scan</code> - The scan to set.</dd></dl>
 </li>
@@ -763,7 +763,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockList">
 <li class="blockList">
 <h4>setTableRecordReader</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.622">setTableRecordReader</a>(<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce">TableRecordReader</a>&nbsp;tableRecordReader)</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.621">setTableRecordReader</a>(<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce">TableRecordReader</a>&nbsp;tableRecordReader)</pre>
 <div class="block">Allows subclasses to set the <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce"><code>TableRecordReader</code></a>.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>tableRecordReader</code> - A different <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableRecordReader.html" title="class in org.apache.hadoop.hbase.mapreduce"><code>TableRecordReader</code></a>
    implementation.</dd></dl>
@@ -775,7 +775,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockList">
 <li class="blockList">
 <h4>initialize</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.641">initialize</a>(org.apache.hadoop.mapreduce.JobContext&nbsp;context)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.640">initialize</a>(org.apache.hadoop.mapreduce.JobContext&nbsp;context)
                    throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Handle subclass-specific setup.
  Each of the entry points used by the MapReduce framework,
@@ -799,7 +799,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockList">
 <li class="blockList">
 <h4>closeTable</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.650">closeTable</a>()
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.649">closeTable</a>()
                    throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Close the Table and related objects that were initialized via
  <a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#initializeTable(org.apache.hadoop.hbase.client.Connection,%20org.apache.hadoop.hbase.TableName)"><code>initializeTable(Connection, TableName)</code></a>.</div>
@@ -813,7 +813,7 @@ public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>close</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.658">close</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>...&nbsp;closables)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html#line.657">close</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>...&nbsp;closables)
             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
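
The Javadoc above positions TableInputFormatBase as a base class whose subclasses supply the Connection, TableName and Scan themselves, typically from initialize(JobContext) via initializeTable(Connection, TableName) and setScan(Scan). Below is a minimal sketch of such a subclass, not taken from this commit: the class name, the example.input.table property and the caching values are illustrative assumptions.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableInputFormatBase;
import org.apache.hadoop.mapreduce.JobContext;

// Hypothetical subclass; class and property names are illustrative only.
public class ExampleTableInputFormat extends TableInputFormatBase {

  @Override
  protected void initialize(JobContext context) throws IOException {
    // Per the Javadoc above, the MapReduce entry points (getSplits, createRecordReader)
    // go through initialize(JobContext), so the table is wired up lazily here.
    Configuration conf = HBaseConfiguration.create(context.getConfiguration());
    Connection connection = ConnectionFactory.createConnection(conf);
    TableName tableName = TableName.valueOf(conf.get("example.input.table"));
    initializeTable(connection, tableName);  // closeTable() later releases what is opened here

    Scan scan = new Scan();
    scan.setCaching(500);        // larger caching suits sequential MapReduce scans
    scan.setCacheBlocks(false);  // avoid polluting the region server block cache
    setScan(scan);
  }
}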

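The MAPREDUCE_INPUT_AUTOBALANCE, INPUT_AUTOBALANCE_MAXSKEWRATIO and TABLE_ROW_TEXTKEY constants documented above are ordinary job-configuration switches consulted when splits are calculated. The driver sketch below sets them; the table name, job name and the 3.0 ratio are placeholder values, and IdentityTableMapper is used only to keep the sketch self-contained.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.IdentityTableMapper;
import org.apache.hadoop.hbase.mapreduce.TableInputFormatBase;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;

public class AutoBalanceScanDriver {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Enable auto-balanced splits in getSplits(JobContext).
    conf.setBoolean(TableInputFormatBase.MAPREDUCE_INPUT_AUTOBALANCE, true);
    // Skew ratio consulted by calculateRebalancedSplits(); 3.0 is illustrative.
    conf.setDouble(TableInputFormatBase.INPUT_AUTOBALANCE_MAXSKEWRATIO, 3.0);
    // Row keys are printable ASCII (32~126), so textual split points are allowed.
    conf.setBoolean(TableInputFormatBase.TABLE_ROW_TEXTKEY, true);

    Job job = Job.getInstance(conf, "example-autobalance-scan");
    TableMapReduceUtil.initTableMapperJob("example_table", new Scan(),
        IdentityTableMapper.class, ImmutableBytesWritable.class, Result.class, job);
    job.setNumReduceTasks(0);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
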
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html
index 9d6c081..4283a84 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html
@@ -108,7 +108,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.816">SplitLogManager.ResubmitDirective</a>
+<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.815">SplitLogManager.ResubmitDirective</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.ResubmitDirective</a>&gt;</pre>
 </li>
 </ul>
@@ -196,7 +196,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>CHECK</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.ResubmitDirective</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html#line.817">CHECK</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.ResubmitDirective</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html#line.816">CHECK</a></pre>
 </li>
 </ul>
 <a name="FORCE">
@@ -205,7 +205,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FORCE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.ResubmitDirective</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html#line.817">FORCE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.ResubmitDirective</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html#line.816">FORCE</a></pre>
 </li>
 </ul>
 </li>
@@ -222,7 +222,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.ResubmitDirective</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html#line.624">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.ResubmitDirective</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html#line.623">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -239,7 +239,7 @@ for (SplitLogManager.ResubmitDirective c : SplitLogManager.ResubmitDirective.val
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.ResubmitDirective</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html#line.624">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.ResubmitDirective</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html#line.623">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.Task.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.Task.html b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.Task.html
index 123ee04..b80e892 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.Task.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.Task.html
@@ -100,7 +100,7 @@
 <hr>
 <br>
 <pre><a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Private</a>
-public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.640">SplitLogManager.Task</a>
+public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.639">SplitLogManager.Task</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">In-memory state of an active task.</div>
 </li>
@@ -239,7 +239,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>last_update</h4>
-<pre>public volatile&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.641">last_update</a></pre>
+<pre>public volatile&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.640">last_update</a></pre>
 </li>
 </ul>
 <a name="last_version">
@@ -248,7 +248,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>last_version</h4>
-<pre>public volatile&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.642">last_version</a></pre>
+<pre>public volatile&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.641">last_version</a></pre>
 </li>
 </ul>
 <a name="cur_worker_name">
@@ -257,7 +257,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>cur_worker_name</h4>
-<pre>public volatile&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.643">cur_worker_name</a></pre>
+<pre>public volatile&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.642">cur_worker_name</a></pre>
 </li>
 </ul>
 <a name="batch">
@@ -266,7 +266,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>batch</h4>
-<pre>public volatile&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.TaskBatch</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.644">batch</a></pre>
+<pre>public volatile&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.TaskBatch</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.643">batch</a></pre>
 </li>
 </ul>
 <a name="status">
@@ -275,7 +275,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>status</h4>
-<pre>public volatile&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.645">status</a></pre>
+<pre>public volatile&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.644">status</a></pre>
 </li>
 </ul>
 <a name="incarnation">
@@ -284,7 +284,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>incarnation</h4>
-<pre>public volatile&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.646">incarnation</a></pre>
+<pre>public volatile&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.645">incarnation</a></pre>
 </li>
 </ul>
 <a name="unforcedResubmits">
@@ -293,7 +293,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>unforcedResubmits</h4>
-<pre>public final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.647">unforcedResubmits</a></pre>
+<pre>public final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.646">unforcedResubmits</a></pre>
 </li>
 </ul>
 <a name="resubmitThresholdReached">
@@ -302,7 +302,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>resubmitThresholdReached</h4>
-<pre>public volatile&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.648">resubmitThresholdReached</a></pre>
+<pre>public volatile&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.647">resubmitThresholdReached</a></pre>
 </li>
 </ul>
 </li>
@@ -319,7 +319,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SplitLogManager.Task</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.657">SplitLogManager.Task</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.656">SplitLogManager.Task</a>()</pre>
 </li>
 </ul>
 </li>
@@ -336,7 +336,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.651">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.650">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -349,7 +349,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isOrphan</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.663">isOrphan</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.662">isOrphan</a>()</pre>
 </li>
 </ul>
 <a name="isUnassigned()">
@@ -358,7 +358,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isUnassigned</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.667">isUnassigned</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.666">isUnassigned</a>()</pre>
 </li>
 </ul>
 <a name="heartbeatNoDetails(long)">
@@ -367,7 +367,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>heartbeatNoDetails</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.671">heartbeatNoDetails</a>(long&nbsp;time)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.670">heartbeatNoDetails</a>(long&nbsp;time)</pre>
 </li>
 </ul>
 <a name="heartbeat(long, int, org.apache.hadoop.hbase.ServerName)">
@@ -376,7 +376,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>heartbeat</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.675">heartbeat</a>(long&nbsp;time,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.674">heartbeat</a>(long&nbsp;time,
              int&nbsp;version,
              <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;worker)</pre>
 </li>
@@ -387,7 +387,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>setUnassigned</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.681">setUnassigned</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.Task.html#line.680">setUnassigned</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html
index 1f5d532..6917adb 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html
@@ -100,7 +100,7 @@
 <hr>
 <br>
 <pre><a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Private</a>
-public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.624">SplitLogManager.TaskBatch</a>
+public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.623">SplitLogManager.TaskBatch</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Keeps track of the batch of tasks submitted together by a caller in splitLogDistributed().
  Client threads use this object to wait for all their tasks to be done.
@@ -204,7 +204,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>installed</h4>
-<pre>public&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.625">installed</a></pre>
+<pre>public&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.624">installed</a></pre>
 </li>
 </ul>
 <a name="done">
@@ -213,7 +213,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>done</h4>
-<pre>public&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.626">done</a></pre>
+<pre>public&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.625">done</a></pre>
 </li>
 </ul>
 <a name="error">
@@ -222,7 +222,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>error</h4>
-<pre>public&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.627">error</a></pre>
+<pre>public&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.626">error</a></pre>
 </li>
 </ul>
 <a name="isDead">
@@ -231,7 +231,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isDead</h4>
-<pre>public volatile&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.628">isDead</a></pre>
+<pre>public volatile&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.627">isDead</a></pre>
 </li>
 </ul>
 </li>
@@ -248,7 +248,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SplitLogManager.TaskBatch</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.624">SplitLogManager.TaskBatch</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.623">SplitLogManager.TaskBatch</a>()</pre>
 </li>
 </ul>
 </li>
@@ -265,7 +265,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.631">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html#line.630">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
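
The TaskBatch javadoc above describes client threads waiting for all of their submitted split-log tasks to finish, with installed/done/error acting as the counters. A minimal sketch of that wait pattern follows; it is illustrative only and not the SplitLogManager code itself, and it assumes each completing task bumps done or error and notifies the batch:

    void waitForBatch(SplitLogManager.TaskBatch batch) throws InterruptedException {
      synchronized (batch) {
        // Assumed protocol: workers increment batch.done or batch.error under the
        // batch lock and call batch.notifyAll() as each task completes.
        while (batch.done + batch.error < batch.installed) {
          batch.wait(100); // re-check periodically; batch.isDead could also be consulted
        }
      }
    }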


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
index 7eb93c8..07553fd 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
@@ -103,7 +103,7 @@
 </dl>
 <hr>
 <br>
-<pre>protected static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.109">BufferedDataBlockEncoder.SeekerState</a>
+<pre>protected static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.111">BufferedDataBlockEncoder.SeekerState</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -298,7 +298,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>currentBuffer</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.110">currentBuffer</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.112">currentBuffer</a></pre>
 </li>
 </ul>
 <a name="tagCompressionContext">
@@ -307,7 +307,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>tagCompressionContext</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/TagCompressionContext.html" title="class in org.apache.hadoop.hbase.io">TagCompressionContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.111">tagCompressionContext</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/TagCompressionContext.html" title="class in org.apache.hadoop.hbase.io">TagCompressionContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.113">tagCompressionContext</a></pre>
 </li>
 </ul>
 <a name="valueOffset">
@@ -316,7 +316,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>valueOffset</h4>
-<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.112">valueOffset</a></pre>
+<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.114">valueOffset</a></pre>
 </li>
 </ul>
 <a name="keyLength">
@@ -325,7 +325,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>keyLength</h4>
-<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.113">keyLength</a></pre>
+<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.115">keyLength</a></pre>
 </li>
 </ul>
 <a name="valueLength">
@@ -334,7 +334,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>valueLength</h4>
-<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.114">valueLength</a></pre>
+<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.116">valueLength</a></pre>
 </li>
 </ul>
 <a name="lastCommonPrefix">
@@ -343,7 +343,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>lastCommonPrefix</h4>
-<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.115">lastCommonPrefix</a></pre>
+<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.117">lastCommonPrefix</a></pre>
 </li>
 </ul>
 <a name="tagsLength">
@@ -352,7 +352,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsLength</h4>
-<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.116">tagsLength</a></pre>
+<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.118">tagsLength</a></pre>
 </li>
 </ul>
 <a name="tagsOffset">
@@ -361,7 +361,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsOffset</h4>
-<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.117">tagsOffset</a></pre>
+<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.119">tagsOffset</a></pre>
 </li>
 </ul>
 <a name="tagsCompressedLength">
@@ -370,7 +370,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsCompressedLength</h4>
-<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.118">tagsCompressedLength</a></pre>
+<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.120">tagsCompressedLength</a></pre>
 </li>
 </ul>
 <a name="uncompressTags">
@@ -379,7 +379,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>uncompressTags</h4>
-<pre>protected&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.119">uncompressTags</a></pre>
+<pre>protected&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.121">uncompressTags</a></pre>
 </li>
 </ul>
 <a name="keyBuffer">
@@ -388,7 +388,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>keyBuffer</h4>
-<pre>protected&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.122">keyBuffer</a></pre>
+<pre>protected&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.124">keyBuffer</a></pre>
 <div class="block">We need to store a copy of the key.</div>
 </li>
 </ul>
@@ -398,7 +398,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsBuffer</h4>
-<pre>protected&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.123">tagsBuffer</a></pre>
+<pre>protected&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.125">tagsBuffer</a></pre>
 </li>
 </ul>
 <a name="memstoreTS">
@@ -407,7 +407,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>memstoreTS</h4>
-<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.125">memstoreTS</a></pre>
+<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.127">memstoreTS</a></pre>
 </li>
 </ul>
 <a name="nextKvOffset">
@@ -416,7 +416,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>nextKvOffset</h4>
-<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.126">nextKvOffset</a></pre>
+<pre>protected&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.128">nextKvOffset</a></pre>
 </li>
 </ul>
 <a name="currentKey">
@@ -425,7 +425,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>currentKey</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html" title="class in org.apache.hadoop.hbase">KeyValue.KeyOnlyKeyValue</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.127">currentKey</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html" title="class in org.apache.hadoop.hbase">KeyValue.KeyOnlyKeyValue</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.129">currentKey</a></pre>
 </li>
 </ul>
 <a name="tmpPair">
@@ -434,7 +434,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>tmpPair</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/util/ObjectIntPair.html" title="class in org.apache.hadoop.hbase.util">ObjectIntPair</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.130">tmpPair</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/util/ObjectIntPair.html" title="class in org.apache.hadoop.hbase.util">ObjectIntPair</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.132">tmpPair</a></pre>
 </li>
 </ul>
 <a name="includeTags">
@@ -443,7 +443,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>includeTags</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.131">includeTags</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.133">includeTags</a></pre>
 </li>
 </ul>
 </li>
@@ -460,7 +460,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BufferedDataBlockEncoder.SeekerState</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.133">BufferedDataBlockEncoder.SeekerState</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/ObjectIntPair.html" title="class in org.apache.hadoop.hbase.util">ObjectIntPair</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;tmpPair,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.135">BufferedDataBlockEncoder.SeekerState</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/ObjectIntPair.html" title="class in org.apache.hadoop.hbase.util">ObjectIntPair</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;tmpPair,
                                     boolean&nbsp;includeTags)</pre>
 </li>
 </ul>
@@ -478,7 +478,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isValid</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.138">isValid</a>()</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.140">isValid</a>()</pre>
 </li>
 </ul>
 <a name="invalidate()">
@@ -487,7 +487,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>invalidate</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.142">invalidate</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.144">invalidate</a>()</pre>
 </li>
 </ul>
 <a name="ensureSpaceForKey()">
@@ -496,7 +496,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ensureSpaceForKey</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.150">ensureSpaceForKey</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.152">ensureSpaceForKey</a>()</pre>
 </li>
 </ul>
 <a name="ensureSpaceForTags()">
@@ -505,7 +505,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ensureSpaceForTags</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.163">ensureSpaceForTags</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.165">ensureSpaceForTags</a>()</pre>
 </li>
 </ul>
 <a name="setKey(byte[], long)">
@@ -514,7 +514,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>setKey</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.176">setKey</a>(byte[]&nbsp;keyBuffer,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.178">setKey</a>(byte[]&nbsp;keyBuffer,
           long&nbsp;memTS)</pre>
 </li>
 </ul>
@@ -524,7 +524,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>copyFromNext</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.186">copyFromNext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html" title="class in org.apache.hadoop.hbase.io.encoding">BufferedDataBlockEncoder.SeekerState</a>&nbsp;nextState)</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.188">copyFromNext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html" title="class in org.apache.hadoop.hbase.io.encoding">BufferedDataBlockEncoder.SeekerState</a>&nbsp;nextState)</pre>
 <div class="block">Copy the state from the next one into this instance (the previous state
  placeholder). Used to save the previous state when we are advancing the
  seeker to the next key/value.</div>
@@ -536,7 +536,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>toCell</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.216">toCell</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.218">toCell</a>()</pre>
 </li>
 </ul>
 <a name="toOnheapCell(java.nio.ByteBuffer, int, int)">
@@ -545,7 +545,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>toOnheapCell</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.239">toOnheapCell</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;valAndTagsBuffer,
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.241">toOnheapCell</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;valAndTagsBuffer,
                 int&nbsp;vOffset,
                 int&nbsp;tagsLenSerializationSize)</pre>
 </li>
@@ -556,7 +556,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toOffheapCell</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.261">toOffheapCell</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;valAndTagsBuffer,
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html#line.263">toOffheapCell</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;valAndTagsBuffer,
                  int&nbsp;vOffset,
                  int&nbsp;tagsLenSerializationSize)</pre>
 </li>
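
The copyFromNext javadoc above hints at how a seeker keeps a one-entry history while advancing. The sketch below shows that pattern under stated assumptions: it would live in code with access to the protected SeekerState members, the previous/current names are ours, and decodeNext is a hypothetical helper standing in for the real per-encoding decode step:

    // Hypothetical helper inside a seeker with access to SeekerState (sketch only).
    protected boolean advance(SeekerState previous, SeekerState current, ByteBuff block) {
      if (current.nextKvOffset >= block.limit()) {
        return false;                  // no further key/values in this block
      }
      previous.copyFromNext(current);  // remember the entry we are leaving behind
      block.position(current.nextKvOffset);
      decodeNext(current, block);      // hypothetical: fills key/value/tags state
      return true;
    }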

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
index 7e89cc2..6ba8536 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
@@ -173,7 +173,9 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>private static int</code></td>
-<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#INITIAL_KEY_BUFFER_SIZE">INITIAL_KEY_BUFFER_SIZE</a></strong></code>&nbsp;</td>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#INITIAL_KEY_BUFFER_SIZE">INITIAL_KEY_BUFFER_SIZE</a></strong></code>
+<div class="block">TODO: This datablockencoder is dealing in internals of hfileblocks.</div>
+</td>
 </tr>
 </table>
 </li>
@@ -345,7 +347,8 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>INITIAL_KEY_BUFFER_SIZE</h4>
-<pre>private static&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.56">INITIAL_KEY_BUFFER_SIZE</a></pre>
+<pre>private static&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.58">INITIAL_KEY_BUFFER_SIZE</a></pre>
+<div class="block">TODO: This datablockencoder is dealing in internals of hfileblocks. Purge reference to HFBs</div>
 </li>
 </ul>
 </li>
@@ -379,7 +382,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>decodeKeyValues</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.59">decodeKeyValues</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;source,
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.61">decodeKeyValues</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;source,
                          <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;blkDecodingCtx)
                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#decodeKeyValues(java.io.DataInputStream,%20org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext)">DataBlockEncoder</a></code></strong></div>
@@ -399,7 +402,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>compareCommonRowPrefix</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.90">compareCommonRowPrefix</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.92">compareCommonRowPrefix</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
                          <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;right,
                          int&nbsp;rowCommonPrefix)</pre>
 <div class="block">common prefixes</div>
@@ -411,7 +414,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>compareCommonFamilyPrefix</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.96">compareCommonFamilyPrefix</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.98">compareCommonFamilyPrefix</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
                             <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;right,
                             int&nbsp;familyCommonPrefix)</pre>
 </li>
@@ -422,7 +425,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>compareCommonQualifierPrefix</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.102">compareCommonQualifierPrefix</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.104">compareCommonQualifierPrefix</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
                                <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;right,
                                int&nbsp;qualCommonPrefix)</pre>
 </li>
@@ -433,7 +436,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>afterEncodingKeyValue</h4>
-<pre>protected final&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.993">afterEncodingKeyValue</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell,
+<pre>protected final&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.995">afterEncodingKeyValue</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell,
                         <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out,
                         <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultEncodingContext</a>&nbsp;encodingCtx)
                                    throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -449,7 +452,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>afterDecodingKeyValue</h4>
-<pre>protected final&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1024">afterDecodingKeyValue</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;source,
+<pre>protected final&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1026">afterDecodingKeyValue</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;source,
                          <a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;dest,
                          <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultDecodingContext</a>&nbsp;decodingCtx)
                                     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -463,7 +466,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>newDataBlockEncodingContext</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1057">newDataBlockEncodingContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;encoding,
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1059">newDataBlockEncodingContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;encoding,
                                                     byte[]&nbsp;header,
                                                     <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;meta)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#newDataBlockEncodingContext(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding,%20byte[],%20org.apache.hadoop.hbase.io.hfile.HFileContext)">DataBlockEncoder</a></code></strong></div>
@@ -482,7 +485,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>newDataBlockDecodingContext</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1063">newDataBlockDecodingContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;meta)</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1065">newDataBlockDecodingContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;meta)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#newDataBlockDecodingContext(org.apache.hadoop.hbase.io.hfile.HFileContext)">DataBlockEncoder</a></code></strong></div>
 <div class="block">Creates an encoder specific decoding context, which will prepare the data
  before actual decoding</div>
@@ -499,7 +502,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>internalDecodeKeyValues</h4>
-<pre>protected abstract&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1067">internalDecodeKeyValues</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;source,
+<pre>protected abstract&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1069">internalDecodeKeyValues</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;source,
                                  int&nbsp;allocateHeaderLength,
                                  int&nbsp;skipLastBytes,
                                  <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultDecodingContext</a>&nbsp;decodingCtx)
@@ -514,7 +517,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>ensureSpace</h4>
-<pre>protected static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1078">ensureSpace</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;out,
+<pre>protected static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1080">ensureSpace</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;out,
                int&nbsp;length)
                            throws <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/EncoderBufferTooSmallException.html" title="class in org.apache.hadoop.hbase.io.encoding">EncoderBufferTooSmallException</a></pre>
 <div class="block">Asserts that there is at least the given amount of unfilled space
@@ -530,7 +533,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>startBlockEncoding</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1089">startBlockEncoding</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;blkEncodingCtx,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1091">startBlockEncoding</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;blkEncodingCtx,
                       <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#startBlockEncoding(org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext,%20java.io.DataOutputStream)">DataBlockEncoder</a></code></strong></div>
@@ -550,7 +553,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>encode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1125">encode</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell,
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1127">encode</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell,
          <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;encodingCtx,
          <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -570,7 +573,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>internalEncode</h4>
-<pre>public abstract&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1134">internalEncode</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell,
+<pre>public abstract&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1136">internalEncode</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell,
                  <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultEncodingContext</a>&nbsp;encodingCtx,
                  <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
                             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -584,7 +587,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>endBlockEncoding</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1138">endBlockEncoding</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;encodingCtx,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1140">endBlockEncoding</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;encodingCtx,
                     <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out,
                     byte[]&nbsp;uncompressedBytesWithHeader)
                       throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -604,7 +607,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>createFirstKeyCell</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1153">createFirstKeyCell</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;key,
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1155">createFirstKeyCell</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;key,
                       int&nbsp;keyLength)</pre>
 </li>
 </ul>
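
Taken together, startBlockEncoding, encode and endBlockEncoding form the per-block write lifecycle of this encoder. A rough sketch of a caller driving it is below; the helper name encodeBlock is ours, header and compression handling are simplified, and in HBase proper the hfile block writer drives these calls:

    byte[] encodeBlock(DataBlockEncoder encoder, DataBlockEncoding encoding,
                       byte[] dummyHeader, HFileContext meta, Iterable<Cell> cells)
        throws IOException {
      HFileBlockEncodingContext ctx =
          encoder.newDataBlockEncodingContext(encoding, dummyHeader, meta);
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      DataOutputStream out = new DataOutputStream(baos);
      encoder.startBlockEncoding(ctx, out);              // per-block prelude, if any
      for (Cell cell : cells) {
        encoder.encode(cell, ctx, out);                  // append one encoded key/value
      }
      encoder.endBlockEncoding(ctx, out, baos.toByteArray()); // finalize block metadata
      return baos.toByteArray();
    }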

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html b/devapidocs/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html
index fad94d3..5af299a 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html
@@ -95,7 +95,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.136">DataBlockEncoder.EncodedSeeker</a></pre>
+<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.137">DataBlockEncoder.EncodedSeeker</a></pre>
 <div class="block">An interface which enable to seek while underlying data is encoded.
 
  It works on one HFileBlock, but it is reusable. See
@@ -193,7 +193,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>setCurrentBuffer</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.141">setCurrentBuffer</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buffer)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.142">setCurrentBuffer</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buffer)</pre>
 <div class="block">Set on which buffer there will be done seeking.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>buffer</code> - Used for seeking.</dd></dl>
 </li>
@@ -204,7 +204,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>getKey</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.148">getKey</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.149">getKey</a>()</pre>
 <div class="block">From the current position creates a cell using the key part
  of the current buffer</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>key at current position</dd></dl>
@@ -216,7 +216,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueShallowCopy</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.156">getValueShallowCopy</a>()</pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.157">getValueShallowCopy</a>()</pre>
 <div class="block">Does a shallow copy of the value at the current position. A shallow
  copy is possible because the returned buffer refers to the backing array
  of the original encoded buffer.</div>
@@ -229,7 +229,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>getCell</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.161">getCell</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.162">getCell</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the Cell at the current position. Includes memstore timestamp.</dd></dl>
 </li>
 </ul>
@@ -239,7 +239,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>rewind</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.164">rewind</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.165">rewind</a>()</pre>
 <div class="block">Set position to beginning of given block</div>
 </li>
 </ul>
@@ -249,7 +249,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>next</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.170">next</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.171">next</a>()</pre>
 <div class="block">Move to next position</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true on success, false if there is no more positions.</dd></dl>
 </li>
@@ -260,7 +260,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>seekToKeyInBlock</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.187">seekToKeyInBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key,
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.188">seekToKeyInBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key,
                    boolean&nbsp;seekBefore)</pre>
 <div class="block">Moves the seeker position within the current block to:
  <ul>
@@ -282,7 +282,7 @@
 <ul class="blockListLast">
 <li class="blockList">
 <h4>compareKey</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.195">compareKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#line.196">compareKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
              <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key)</pre>
 <div class="block">Compare the given key against the current key</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>comparator</code> - </dd><dd><code>key</code> - </dd>
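
The EncodedSeeker methods above combine into a simple scan-the-block pattern. A hedged usage sketch follows; the helper name readAll is ours, and the buffer is assumed to be non-empty and to contain only the encoded key/values:

    List<Cell> readAll(DataBlockEncoder encoder, CellComparator comparator,
                       HFileBlockDecodingContext decodingCtx, ByteBuff encodedBlock) {
      DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(comparator, decodingCtx);
      seeker.setCurrentBuffer(encodedBlock);  // block to seek and iterate over
      seeker.rewind();                        // position at the first key/value
      List<Cell> cells = new ArrayList<>();
      do {
        cells.add(seeker.getCell());          // cell at the current position
      } while (seeker.next());                // false once the block is exhausted
      return cells;
    }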

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html b/devapidocs/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html
index 159f89e..c4a7039 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html
@@ -219,7 +219,7 @@ public interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/
 <ul class="blockList">
 <li class="blockList">
 <h4>startBlockEncoding</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.51">startBlockEncoding</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;encodingCtx,
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.52">startBlockEncoding</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;encodingCtx,
                       <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts encoding for a block of KeyValues. Call
@@ -236,7 +236,7 @@ public interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/
 <ul class="blockList">
 <li class="blockList">
 <h4>encode</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.62">encode</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell,
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.63">encode</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell,
          <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;encodingCtx,
          <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -253,7 +253,7 @@ public interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/
 <ul class="blockList">
 <li class="blockList">
 <h4>endBlockEncoding</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.73">endBlockEncoding</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;encodingCtx,
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.74">endBlockEncoding</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;encodingCtx,
                     <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out,
                     byte[]&nbsp;uncompressedBytesWithHeader)
                       throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -270,7 +270,7 @@ public interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/
 <ul class="blockList">
 <li class="blockList">
 <h4>decodeKeyValues</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.83">decodeKeyValues</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;source,
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.84">decodeKeyValues</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;source,
                          <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;decodingCtx)
                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Decode.</div>
@@ -286,7 +286,7 @@ public interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/
 <ul class="blockList">
 <li class="blockList">
 <h4>getFirstKeyCellInBlock</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.94">getFirstKeyCellInBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;block)</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.95">getFirstKeyCellInBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;block)</pre>
 <div class="block">Return first key in block as a cell. Useful for indexing. Typically does not make
  a deep copy but returns a buffer wrapping a segment of the actual block's
  byte array. This is because the first key in block is usually stored
@@ -301,7 +301,7 @@ public interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/
 <ul class="blockList">
 <li class="blockList">
 <h4>createSeeker</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html" title="interface in org.apache.hadoop.hbase.io.encoding">DataBlockEncoder.EncodedSeeker</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.102">createSeeker</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html" title="interface in org.apache.hadoop.hbase.io.encoding">DataBlockEncoder.EncodedSeeker</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.103">createSeeker</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
                                           <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;decodingCtx)</pre>
 <div class="block">Create a HFileBlock seeker which find KeyValues within a block.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>comparator</code> - what kind of comparison should be used</dd><dd><code>decodingCtx</code> - </dd>
@@ -314,7 +314,7 @@ public interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/
 <ul class="blockList">
 <li class="blockList">
 <h4>newDataBlockEncodingContext</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.117">newDataBlockEncodingContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;encoding,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.118">newDataBlockEncodingContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;encoding,
                                                     byte[]&nbsp;headerBytes,
                                                     <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;meta)</pre>
 <div class="block">Creates a encoder specific encoding context</div>
@@ -329,7 +329,7 @@ public interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/
 <ul class="blockListLast">
 <li class="blockList">
 <h4>newDataBlockDecodingContext</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.128">newDataBlockDecodingContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;meta)</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html#line.129">newDataBlockDecodingContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;meta)</pre>
 <div class="block">Creates an encoder specific decoding context, which will prepare the data
  before actual decoding</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>meta</code> - HFile meta data</dd>

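The Javadoc hunks above spell out the DataBlockEncoder contract: the caller obtains an encoding
context, feeds each Cell to encode() on a DataOutputStream, closes the block with
endBlockEncoding(), and on the read side uses a decoding context with decodeKeyValues() (or a
seeker from createSeeker()) to get the cells back. The sketch below is only a toy illustration of
that three-phase shape; the ToyBlockEncoder class and its length-prefixed format are made up for
this example and are not the real HBase classes or block format.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class ToyBlockEncoder {

  // Encode one key/value pair as length-prefixed byte arrays and report bytes written.
  static int encode(byte[] key, byte[] value, DataOutputStream out) throws IOException {
    out.writeInt(key.length);
    out.write(key);
    out.writeInt(value.length);
    out.write(value);
    return 8 + key.length + value.length; // two 4-byte length prefixes plus payload
  }

  // "Finish" the block: a real encoder would fix up headers here; we just prepend the cell count.
  static byte[] endBlockEncoding(ByteArrayOutputStream block, int cellCount) throws IOException {
    ByteArrayOutputStream finished = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(finished);
    out.writeInt(cellCount);
    block.writeTo(out);
    return finished.toByteArray();
  }

  // Decode a finished block back into printable key=value pairs.
  static void decodeKeyValues(DataInputStream source) throws IOException {
    int cellCount = source.readInt();
    for (int i = 0; i < cellCount; i++) {
      byte[] key = new byte[source.readInt()];
      source.readFully(key);
      byte[] value = new byte[source.readInt()];
      source.readFully(value);
      System.out.println(new String(key, StandardCharsets.UTF_8) + " = "
          + new String(value, StandardCharsets.UTF_8));
    }
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream block = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(block);
    encode("row1/cf:a".getBytes(StandardCharsets.UTF_8),
        "v1".getBytes(StandardCharsets.UTF_8), out);
    encode("row2/cf:a".getBytes(StandardCharsets.UTF_8),
        "v2".getBytes(StandardCharsets.UTF_8), out);
    out.flush();
    byte[] finishedBlock = endBlockEncoding(block, 2);
    decodeKeyValues(new DataInputStream(new ByteArrayInputStream(finishedBlock)));
  }
}

The real interface additionally threads HFileBlockEncodingContext/HFileBlockDecodingContext through
every call so that per-block state (for example tag compression) can be set up once and reused; the
sketch drops that bookkeeping for brevity.
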
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
index 7ccab08..6198ddf 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
@@ -108,7 +108,7 @@
 </dl>
 <hr>
 <br>
-<pre>private static enum <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.141">CacheConfig.ExternalBlockCaches</a>
+<pre>private static enum <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.147">CacheConfig.ExternalBlockCaches</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile">CacheConfig.ExternalBlockCaches</a>&gt;</pre>
 <div class="block">Enum of all built in external block caches.
  This is used for config.</div>
@@ -214,7 +214,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>memcached</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile">CacheConfig.ExternalBlockCaches</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html#line.142">memcached</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile">CacheConfig.ExternalBlockCaches</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html#line.148">memcached</a></pre>
 </li>
 </ul>
 </li>
@@ -231,7 +231,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>clazz</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html#line.144">clazz</a></pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html#line.150">clazz</a></pre>
 </li>
 </ul>
 </li>
@@ -248,7 +248,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile">CacheConfig.ExternalBlockCaches</a>[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html#line.141">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile">CacheConfig.ExternalBlockCaches</a>[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html#line.147">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -265,7 +265,7 @@ for (CacheConfig.ExternalBlockCaches c : CacheConfig.ExternalBlockCaches.values(
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile">CacheConfig.ExternalBlockCaches</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html#line.141">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile">CacheConfig.ExternalBlockCaches</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html#line.147">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 


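The ExternalBlockCaches enum documented above is a config shim: each constant carries the
implementing class in its clazz field, and the generated values()/valueOf() methods let a short
config string such as "memcached" be resolved to a BlockCache class. The sketch below shows that
lookup pattern in a self-contained form; ExternalCacheLookup, the stand-in BlockCache interface,
and the Class.forName fallback are assumptions for illustration, not CacheConfig's actual code.

public class ExternalCacheLookup {

  // Stand-in for the real org.apache.hadoop.hbase.io.hfile.BlockCache interface.
  interface BlockCache { }

  static class MemcachedBlockCache implements BlockCache { }

  // Each constant names a built-in external cache and carries its implementing class.
  enum ExternalBlockCaches {
    memcached(MemcachedBlockCache.class);

    final Class<? extends BlockCache> clazz;

    ExternalBlockCaches(Class<? extends BlockCache> clazz) {
      this.clazz = clazz;
    }
  }

  // Resolve a config value: try the enum name first, then fall back to a fully qualified class name.
  static Class<? extends BlockCache> resolve(String configValue) throws ClassNotFoundException {
    try {
      return ExternalBlockCaches.valueOf(configValue).clazz;
    } catch (IllegalArgumentException notABuiltInName) {
      return Class.forName(configValue).asSubclass(BlockCache.class);
    }
  }

  public static void main(String[] args) throws Exception {
    System.out.println(resolve("memcached"));  // -> class ExternalCacheLookup$MemcachedBlockCache
  }
}

Because valueOf() throws IllegalArgumentException for names that are not declared constants, the
try/catch fallback to a fully qualified class name is a natural way to accept either a built-in
name or a custom implementation from configuration.
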
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
index d8b6ca7..66dbcf3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
@@ -31,12 +31,12 @@
 <span class="sourceLineNo">023</span>import java.nio.ByteBuffer;<a name="line.23"></a>
 <span class="sourceLineNo">024</span><a name="line.24"></a>
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.ByteBufferedCell;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.Cell;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HConstants;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.31"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.Cell;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HConstants;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.31"></a>
 <span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.KeyValue.Type;<a name="line.32"></a>
 <span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.33"></a>
 <span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.34"></a>
@@ -60,1113 +60,1115 @@
 <span class="sourceLineNo">052</span> */<a name="line.52"></a>
 <span class="sourceLineNo">053</span>@InterfaceAudience.Private<a name="line.53"></a>
 <span class="sourceLineNo">054</span>abstract class BufferedDataBlockEncoder implements DataBlockEncoder {<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  private static int INITIAL_KEY_BUFFER_SIZE = 512;<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  @Override<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public ByteBuffer decodeKeyValues(DataInputStream source,<a name="line.59"></a>
-<span class="sourceLineNo">060</span>      HFileBlockDecodingContext blkDecodingCtx) throws IOException {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>      throw new IOException(this.getClass().getName() + " only accepts "<a name="line.62"></a>
-<span class="sourceLineNo">063</span>          + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    }<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>    HFileBlockDefaultDecodingContext decodingCtx =<a name="line.66"></a>
-<span class="sourceLineNo">067</span>        (HFileBlockDefaultDecodingContext) blkDecodingCtx;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    if (decodingCtx.getHFileContext().isIncludesTags()<a name="line.68"></a>
-<span class="sourceLineNo">069</span>        &amp;&amp; decodingCtx.getHFileContext().isCompressTags()) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      if (decodingCtx.getTagCompressionContext() != null) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        // It will be overhead to create the TagCompressionContext again and again for every block<a name="line.71"></a>
-<span class="sourceLineNo">072</span>        // decoding.<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        decodingCtx.getTagCompressionContext().clear();<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      } else {<a name="line.74"></a>
-<span class="sourceLineNo">075</span>        try {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>          TagCompressionContext tagCompressionContext = new TagCompressionContext(<a name="line.76"></a>
-<span class="sourceLineNo">077</span>              LRUDictionary.class, Byte.MAX_VALUE);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>          decodingCtx.setTagCompressionContext(tagCompressionContext);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>        } catch (Exception e) {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>          throw new IOException("Failed to initialize TagCompressionContext", e);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>        }<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      }<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    }<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    return internalDecodeKeyValues(source, 0, 0, decodingCtx);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  /********************* common prefixes *************************/<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  // Having this as static is fine but if META is having DBE then we should<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  // change this.<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  public static int compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    return Bytes.compareTo(left.getRowArray(), left.getRowOffset() + rowCommonPrefix,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>        left.getRowLength() - rowCommonPrefix, right.getRowArray(), right.getRowOffset()<a name="line.92"></a>
-<span class="sourceLineNo">093</span>            + rowCommonPrefix, right.getRowLength() - rowCommonPrefix);<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  public static int compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset() + familyCommonPrefix,<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        left.getFamilyLength() - familyCommonPrefix, right.getFamilyArray(),<a name="line.98"></a>
-<span class="sourceLineNo">099</span>        right.getFamilyOffset() + familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
-<span class="sourceLineNo">101</span><a name="line.101"></a>
-<span class="sourceLineNo">102</span>  public static int compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset() + qualCommonPrefix,<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        left.getQualifierLength() - qualCommonPrefix, right.getQualifierArray(),<a name="line.104"></a>
-<span class="sourceLineNo">105</span>        right.getQualifierOffset() + qualCommonPrefix, right.getQualifierLength()<a name="line.105"></a>
-<span class="sourceLineNo">106</span>            - qualCommonPrefix);<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  }<a name="line.107"></a>
-<span class="sourceLineNo">108</span><a name="line.108"></a>
-<span class="sourceLineNo">109</span>  protected static class SeekerState {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    protected ByteBuff currentBuffer;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    protected TagCompressionContext tagCompressionContext;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    protected int valueOffset = -1;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    protected int keyLength;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    protected int valueLength;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    protected int lastCommonPrefix;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    protected int tagsLength = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    protected int tagsOffset = -1;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    protected int tagsCompressedLength = 0;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    protected boolean uncompressTags = true;<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>    /** We need to store a copy of the key. */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    protected byte[] keyBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    protected byte[] tagsBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>    protected long memstoreTS;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    protected int nextKvOffset;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    protected KeyValue.KeyOnlyKeyValue currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    // many object creations.<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    private final ObjectIntPair&lt;ByteBuffer&gt; tmpPair;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    private final boolean includeTags;<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public SeekerState(ObjectIntPair&lt;ByteBuffer&gt; tmpPair, boolean includeTags) {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      this.tmpPair = tmpPair;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      this.includeTags = includeTags;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>    protected boolean isValid() {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      return valueOffset != -1;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    protected void invalidate() {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      valueOffset = -1;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>      tagsCompressedLength = 0;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      uncompressTags = true;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      currentBuffer = null;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    protected void ensureSpaceForKey() {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      if (keyLength &gt; keyBuffer.length) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        // rare case, but we need to handle arbitrary length of key<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        int newKeyBufferLength = Math.max(keyBuffer.length, 1) * 2;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>        while (keyLength &gt; newKeyBufferLength) {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>          newKeyBufferLength *= 2;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        }<a name="line.156"></a>
-<span class="sourceLineNo">157</span>        byte[] newKeyBuffer = new byte[newKeyBufferLength];<a name="line.157"></a>
-<span class="sourceLineNo">158</span>        System.arraycopy(keyBuffer, 0, newKeyBuffer, 0, keyBuffer.length);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        keyBuffer = newKeyBuffer;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      }<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    }<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>    protected void ensureSpaceForTags() {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      if (tagsLength &gt; tagsBuffer.length) {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        // rare case, but we need to handle arbitrary length of tags<a name="line.165"></a>
-<span class="sourceLineNo">166</span>        int newTagsBufferLength = Math.max(tagsBuffer.length, 1) * 2;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        while (tagsLength &gt; newTagsBufferLength) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>          newTagsBufferLength *= 2;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>        }<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        byte[] newTagsBuffer = new byte[newTagsBufferLength];<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        System.arraycopy(tagsBuffer, 0, newTagsBuffer, 0, tagsBuffer.length);<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        tagsBuffer = newTagsBuffer;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    }<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>    protected void setKey(byte[] keyBuffer, long memTS) {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      currentKey.setKey(keyBuffer, 0, keyLength);<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      memstoreTS = memTS;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    }<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>    /**<a name="line.181"></a>
-<span class="sourceLineNo">182</span>     * Copy the state from the next one into this instance (the previous state<a name="line.182"></a>
-<span class="sourceLineNo">183</span>     * placeholder). Used to save the previous state when we are advancing the<a name="line.183"></a>
-<span class="sourceLineNo">184</span>     * seeker to the next key/value.<a name="line.184"></a>
-<span class="sourceLineNo">185</span>     */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    protected void copyFromNext(SeekerState nextState) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      if (keyBuffer.length != nextState.keyBuffer.length) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>        keyBuffer = nextState.keyBuffer.clone();<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      } else if (!isValid()) {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        // Note: we can only call isValid before we override our state, so this<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        // comes before all the assignments at the end of this method.<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        System.arraycopy(nextState.keyBuffer, 0, keyBuffer, 0,<a name="line.192"></a>
-<span class="sourceLineNo">193</span>             nextState.keyLength);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      } else {<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        // don't copy the common prefix between this key and the previous one<a name="line.195"></a>
-<span class="sourceLineNo">196</span>        System.arraycopy(nextState.keyBuffer, nextState.lastCommonPrefix,<a name="line.196"></a>
-<span class="sourceLineNo">197</span>            keyBuffer, nextState.lastCommonPrefix, nextState.keyLength<a name="line.197"></a>
-<span class="sourceLineNo">198</span>                - nextState.lastCommonPrefix);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      }<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      currentKey = nextState.currentKey;<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>      valueOffset = nextState.valueOffset;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      keyLength = nextState.keyLength;<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      valueLength = nextState.valueLength;<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      lastCommonPrefix = nextState.lastCommonPrefix;<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      nextKvOffset = nextState.nextKvOffset;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      memstoreTS = nextState.memstoreTS;<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      currentBuffer = nextState.currentBuffer;<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      tagsOffset = nextState.tagsOffset;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      tagsLength = nextState.tagsLength;<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (nextState.tagCompressionContext != null) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        tagCompressionContext = nextState.tagCompressionContext;<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    public Cell toCell() {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>      // Buffer backing the value and tags part from the HFileBlock's buffer<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      // When tag compression in use, this will be only the value bytes area.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      ByteBuffer valAndTagsBuffer;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      int vOffset;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      int valAndTagsLength = this.valueLength;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>      int tagsLenSerializationSize = 0;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      if (this.includeTags &amp;&amp; this.tagCompressionContext == null) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        // Include the tags part also. This will be the tags bytes + 2 bytes of for storing tags<a name="line.224"></a>
-<span class="sourceLineNo">225</span>        // length<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        tagsLenSerializationSize = this.tagsOffset - (this.valueOffset + this.valueLength);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        valAndTagsLength += tagsLenSerializationSize + this.tagsLength;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      this.currentBuffer.asSubByteBuffer(this.valueOffset, valAndTagsLength, this.tmpPair);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      valAndTagsBuffer = this.tmpPair.getFirst();<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      vOffset = this.tmpPair.getSecond();// This is the offset to value part in the BB<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      if (valAndTagsBuffer.hasArray()) {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        return toOnheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      } else {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        return toOffheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>    private Cell toOnheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>        int tagsLenSerializationSize) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      byte[] tagsArray = HConstants.EMPTY_BYTE_ARRAY;<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      int tOffset = 0;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      if (this.includeTags) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        if (this.tagCompressionContext == null) {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          tagsArray = valAndTagsBuffer.array();<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          tOffset = valAndTagsBuffer.arrayOffset() + vOffset + this.valueLength<a name="line.246"></a>
-<span class="sourceLineNo">247</span>              + tagsLenSerializationSize;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        } else {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          tagsArray = Bytes.copy(tagsBuffer, 0, this.tagsLength);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          tOffset = 0;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      return new OnheapDecodedCell(Bytes.copy(keyBuffer, 0, this.keyLength),<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.255"></a>
-<span class="sourceLineNo">256</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer.array(),<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          valAndTagsBuffer.arrayOffset() + vOffset, this.valueLength, memstoreTS, tagsArray,<a name="line.257"></a>
-<span class="sourceLineNo">258</span>          tOffset, this.tagsLength);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>    private Cell toOffheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.261"></a>
-<span class="sourceLineNo">262</span>        int tagsLenSerializationSize) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      ByteBuffer tagsBuf =  HConstants.EMPTY_BYTE_BUFFER;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      int tOffset = 0;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      if (this.includeTags) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        if (this.tagCompressionContext == null) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          tagsBuf = valAndTagsBuffer;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>          tOffset = vOffset + this.valueLength + tagsLenSerializationSize;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        } else {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          tagsBuf = ByteBuffer.wrap(Bytes.copy(tagsBuffer, 0, this.tagsLength));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          tOffset = 0;<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return new OffheapDecodedCell(ByteBuffer.wrap(Bytes.copy(keyBuffer, 0, this.keyLength)),<a name="line.274"></a>
-<span class="sourceLineNo">275</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.276"></a>
-<span class="sourceLineNo">277</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer, vOffset,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>          this.valueLength, memstoreTS, tagsBuf, tOffset, this.tagsLength);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    }<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
-<span class="sourceLineNo">283</span>   * Copies only the key part of the keybuffer by doing a deep copy and passes the<a name="line.283"></a>
-<span class="sourceLineNo">284</span>   * seeker state members for taking a clone.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Note that the value byte[] part is still pointing to the currentBuffer and<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * represented by the valueOffset and valueLength<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  // We return this as a Cell to the upper layers of read flow and might try setting a new SeqId<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  // there. So this has to be an instance of SettableSequenceId.<a name="line.289"></a>
-<span class="sourceLineNo">290</span>  protected static class OnheapDecodedCell implements Cell, HeapSize, SettableSequenceId,<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      Streamable {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.293"></a>
-<span class="sourceLineNo">294</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.ARRAY));<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    private byte[] keyOnlyBuffer;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    private short rowLength;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    private int familyOffset;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    private byte familyLength;<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    private int qualifierOffset;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    private int qualifierLength;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    private long timestamp;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    private byte typeByte;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    private byte[] valueBuffer;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    private int valueOffset;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    private int valueLength;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    private byte[] tagsBuffer;<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    private int tagsOffset;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    private int tagsLength;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    private long seqId;<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>    protected OnheapDecodedCell(byte[] keyBuffer, short rowLength, int familyOffset,<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.312"></a>
-<span class="sourceLineNo">313</span>        byte[] valueBuffer, int valueOffset, int valueLen, long seqId, byte[] tagsBuffer,<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        int tagsOffset, int tagsLength) {<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      this.keyOnlyBuffer = keyBuffer;<a name="line.315"></a>
-<span class="sourceLineNo">316</span>      this.rowLength = rowLength;<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      this.familyOffset = familyOffset;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.familyLength = familyLength;<a name="line.318"></a>
-<span class="sourceLineNo">319</span>      this.qualifierOffset = qualOffset;<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.qualifierLength = qualLength;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>      this.timestamp = timeStamp;<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      this.typeByte = typeByte;<a name="line.322"></a>
-<span class="sourceLineNo">323</span>      this.valueBuffer = valueBuffer;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      this.valueOffset = valueOffset;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      this.valueLength = valueLen;<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      this.tagsBuffer = tagsBuffer;<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      this.tagsOffset = tagsOffset;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      this.tagsLength = tagsLength;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      setSequenceId(seqId);<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>    @Override<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    public byte[] getRowArray() {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>      return keyOnlyBuffer;<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    }<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>    @Override<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    public byte[] getFamilyArray() {<a name="line.338"></a>
-<span class="sourceLineNo">339</span>      return keyOnlyBuffer;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    }<a name="line.340"></a>
-<span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>    @Override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    public byte[] getQualifierArray() {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>      return keyOnlyBuffer;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
-<span class="sourceLineNo">346</span><a name="line.346"></a>
-<span class="sourceLineNo">347</span>    @Override<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    public int getRowOffset() {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      return Bytes.SIZEOF_SHORT;<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>    @Override<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    public short getRowLength() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      return rowLength;<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>    @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    public int getFamilyOffset() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      return familyOffset;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>    @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    public byte getFamilyLength() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      return familyLength;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>    @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    public int getQualifierOffset() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      return qualifierOffset;<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    }<a name="line.370"></a>
-<span class="sourceLineNo">371</span><a name="line.371"></a>
-<span class="sourceLineNo">372</span>    @Override<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    public int getQualifierLength() {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      return qualifierLength;<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>    @Override<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    public long getTimestamp() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      return timestamp;<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>    @Override<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    public byte getTypeByte() {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return typeByte;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>    @Override<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    public long getSequenceId() {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      return seqId;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>    @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    public byte[] getValueArray() {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      return this.valueBuffer;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>    @Override<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    public int getValueOffset() {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      return valueOffset;<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>    @Override<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    public int getValueLength() {<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      return valueLength;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    @Override<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    public byte[] getTagsArray() {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      return this.tagsBuffer;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    }<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    @Override<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    public int getTagsOffset() {<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      return this.tagsOffset;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    }<a name="line.415"></a>
-<span class="sourceLineNo">416</span><a name="line.416"></a>
-<span class="sourceLineNo">417</span>    @Override<a name="line.417"></a>
-<span class="sourceLineNo">418</span>    public int getTagsLength() {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      return tagsLength;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    @Override<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public String toString() {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          + getValueLength() + "/seqid=" + seqId;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    @Override<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    public void setSequenceId(long seqId) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      this.seqId = seqId;<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    @Override<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    public long heapSize() {<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>    @Override<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    public int write(OutputStream out) throws IOException {<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      return write(out, true);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    @Override<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.445"></a>
-<span class="sourceLineNo">446</span>          tagsLength, withTags);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      ByteBufferUtils.putInt(out, keyOnlyBuffer.length);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      // Write key<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      out.write(keyOnlyBuffer);<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      // Write value<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      out.write(this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      if (withTags) {<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        // 2 bytes tags length followed by tags bytes<a name="line.455"></a>
-<span class="sourceLineNo">456</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        out.write(this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>  }<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>  protected static class OffheapDecodedCell extends ByteBufferedCell implements HeapSize,<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      SettableSequenceId, Streamable {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.468"></a>
-<span class="sourceLineNo">469</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.BYTE_BUFFER));<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    private ByteBuffer keyBuffer;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    private short rowLength;<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    private int familyOffset;<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    private byte familyLength;<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    private int qualifierOffset;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    private int qualifierLength;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    private long timestamp;<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    private byte typeByte;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    private ByteBuffer valueBuffer;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    private int valueOffset;<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    private int valueLength;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private ByteBuffer tagsBuffer;<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    private int tagsOffset;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    private int tagsLength;<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    private long seqId;<a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>    protected OffheapDecodedCell(ByteBuffer keyBuffer, short rowLength, int familyOffset,<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.488"></a>
-<span class="sourceLineNo">489</span>        ByteBuffer valueBuffer, int valueOffset, int valueLen, long seqId, ByteBuffer tagsBuffer,<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        int tagsOffset, int tagsLength) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      // The keyBuffer is always onheap<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      assert keyBuffer.hasArray();<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      assert keyBuffer.arrayOffset() == 0;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      this.keyBuffer = keyBuffer;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      this.rowLength = rowLength;<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      this.familyOffset = familyOffset;<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      this.familyLength = familyLength;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      this.qualifierOffset = qualOffset;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      this.qualifierLength = qualLength;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      this.timestamp = timeStamp;<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      this.typeByte = typeByte;<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      this.valueBuffer = valueBuffer;<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      this.valueOffset = valueOffset;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      this.valueLength = valueLen;<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      this.tagsBuffer = tagsBuffer;<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      this.tagsOffset = tagsOffset;<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      this.tagsLength = tagsLength;<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      setSequenceId(seqId);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    @Override<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    public byte[] getRowArray() {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return this.keyBuffer.array();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    @Override<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    public int getRowOffset() {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      return getRowPosition();<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>    @Override<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    public short getRowLength() {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      return this.rowLength;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
-<span class="sourceLineNo">525</span><a name="line.525"></a>
-<span class="sourceLineNo">526</span>    @Override<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    public byte[] getFamilyArray() {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return this.keyBuffer.array();<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span><a name="line.530"></a>
-<span class="sourceLineNo">531</span>    @Override<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    public int getFamilyOffset() {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      return getFamilyPosition();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>    @Override<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    public byte getFamilyLength() {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      return this.familyLength;<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    }<a name="line.539"></a>
-<span class="sourceLineNo">540</span><a name="line.540"></a>
-<span class="sourceLineNo">541</span>    @Override<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    public byte[] getQualifierArray() {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      return this.keyBuffer.array();<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    }<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    @Override<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    public int getQualifierOffset() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      return getQualifierPosition();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    @Override<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    public int getQualifierLength() {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      return this.qualifierLength;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    }<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    @Override<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    public long getTimestamp() {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      return this.timestamp;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    @Override<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    public byte getTypeByte() {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      return this.typeByte;<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>    @Override<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    public long getSequenceId() {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      return this.seqId;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    @Override<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    public byte[] getValueArray() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      return CellUtil.cloneValue(this);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>    @Override<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    public int getValueOffset() {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      return 0;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
-<span class="sourceLineNo">580</span><a name="line.580"></a>
-<span class="sourceLineNo">581</span>    @Override<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    public int getValueLength() {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      return this.valueLength;<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>    @Override<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    public byte[] getTagsArray() {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      return CellUtil.cloneTags(this);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    }<a name="line.589"></a>
-<span class="sourceLineNo">590</span><a name="line.590"></a>
-<span class="sourceLineNo">591</span>    @Override<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    public int getTagsOffset() {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      return 0;<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    }<a name="line.594"></a>
-<span class="sourceLineNo">595</span><a name="line.595"></a>
-<span class="sourceLineNo">596</span>    @Override<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    public int getTagsLength() {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      return this.tagsLength;<a name="line.598"></a>
-<span class="sourceLineNo">599</span>    }<a name="line.599"></a>
-<span class="sourceLineNo">600</span><a name="line.600"></a>
-<span class="sourceLineNo">601</span>    @Override<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    public ByteBuffer getRowByteBuffer() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      return this.keyBuffer;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    }<a name="line.604"></a>
-<span class="sourceLineNo">605</span><a name="line.605"></a>
-<span class="sourceLineNo">606</span>    @Override<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    public int getRowPosition() {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      return Bytes.SIZEOF_SHORT;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    @Override<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    public ByteBuffer getFamilyByteBuffer() {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      return this.keyBuffer;<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>    @Override<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    public int getFamilyPosition() {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      return this.familyOffset;<a name="line.618"></a>
-<span class="sourceLineNo">619</span>    }<a name="line.619"></a>
-<span class="sourceLineNo">620</span><a name="line.620"></a>
-<span class="sourceLineNo">621</span>    @Override<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    public ByteBuffer getQualifierByteBuffer() {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      return this.keyBuffer;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    }<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>    @Override<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    public int getQualifierPosition() {<a name="line.627"></a>
-<span class="sourceLineNo">628</span>      return this.qualifierOffset;<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    @Override<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    public ByteBuffer getValueByteBuffer() {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      return this.valueBuffer;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
-<span class="sourceLineNo">635</span><a name="line.635"></a>
-<span class="sourceLineNo">636</span>    @Override<a name="line.636"></a>
-<span class="sourceLineNo">637</span>    public int getValuePosition() {<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this.valueOffset;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    @Override<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    public ByteBuffer getTagsByteBuffer() {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      return this.tagsBuffer;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>    }<a name="line.644"></a>
-<span class="sourceLineNo">645</span><a name="line.645"></a>
-<span class="sourceLineNo">646</span>    @Override<a name="line.646"></a>
-<span class="sourceLineNo">647</span>    public int getTagsPosition() {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      return this.tagsOffset;<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
-<span class="sourceLineNo">650</span><a name="line.650"></a>
-<span class="sourceLineNo">651</span>    @Override<a name="line.651"></a>
-<span class="sourceLineNo">652</span>    public long heapSize() {<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>    @Override<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    public void setSequenceId(long seqId) {<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      this.seqId = seqId;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span><a name="line.660"></a>
-<span class="sourceLineNo">661</span>    @Override<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    public int write(OutputStream out) throws IOException {<a name="line.662"></a>
-<span class="sourceLineNo">663</span>      return write(out, true);<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    }<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    @Override<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          tagsLength, withTags);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      ByteBufferUtils.putInt(out, keyBuffer.capacity());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      // Write key<a name="line.673"></a>
-<span class="sourceLineNo">674</span>      out.write(keyBuffer.array());<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      // Write value<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      ByteBufferUtils.copyBufferToStream(out, this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      if (withTags) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>        // 2 bytes tags length followed by tags bytes<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        // tags length is serialized with 2 bytes only (the short way) even if the type is int.<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        // As these are non-negative numbers, we save the sign bit. See HBASE-11437<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.681"></a>
-<span class="sourceLineNo">682</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.682"></a>
-<span class="sourceLineNo">683</span>        ByteBufferUtils.copyBufferToStream(out, this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  protected abstract static class<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      BufferedEncodedSeeker&lt;STATE extends SeekerState&gt;<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      implements EncodedSeeker {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    protected HFileBlockDecodingContext decodingCtx;<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    protected final CellComparator comparator;<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    protected ByteBuff currentBuffer;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    protected TagCompressionContext tagCompressionContext = null;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    protected  KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    // many object creations.<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    protected final ObjectIntPair&lt;ByteBuffer&gt; tmpPair = new ObjectIntPair&lt;ByteBuffer&gt;();<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    protected STATE current, previous;<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>    public BufferedEncodedSeeker(CellComparator comparator,<a name="line.702"></a>
-<span class="sourceLineNo">703</span>        HFileBlockDecodingContext decodingCtx) {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      this.comparator = comparator;<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.decodingCtx = decodingCtx;<a name="line.705"></a>
-<span class="sourceLineNo">706</span>      if (decodingCtx.getHFileContext().isCompressTags()) {<a name="line.706"></a>
-<span class="sourceLineNo">707</span>        try {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>          tagCompressionContext = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>        } catch (Exception e) {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>          throw new RuntimeException("Failed to initialize TagCompressionContext", e);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>        }<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      }<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      current = createSeekerState(); // always valid<a name="line.713"></a>
-<span class="sourceLineNo">714</span>      previous = createSeekerState(); // may not be valid<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    }<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>    protected boolean includesMvcc() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>      return this.decodingCtx.getHFileContext().isIncludesMvcc();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    }<a name="line.719"></a>
-<span class="sourceLineNo">720</span><a name="line.720"></a>
-<span class="sourceLineNo">721</span>    protected boolean includesTags() {<a name="line.721"></a>
-<span class="sourceLineNo">722</span>      return this.decodingCtx.getHFileContext().isIncludesTags();<a name="line.722"></a>
-<span class="sourceLineNo">723</span>    }<a name="line.723"></a>
-<span class="sourceLineNo">724</span><a name="line.724"></a>
-<span class="sourceLineNo">725</span>    @Override<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    public int compareKey(CellComparator comparator, Cell key) {<a name="line.726"></a>
-<span class="sourceLineNo">727</span>      keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return comparator.compareKeyIgnoresMvcc(key, keyOnlyKV);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    @Override<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    public void setCurrentBuffer(ByteBuff buffer) {<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      if (this.tagCompressionContext != null) {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        this.tagCompressionContext.clear();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      }<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      currentBuffer = buffer;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>      current.currentBuffer = currentBuffer;<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      if(tagCompressionContext != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>        current.tagCompressionContext = tagCompressionContext;<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      }<a name="line.740"></a>
-<span class="sourceLineNo">741</span>      decodeFirst();<a name="line.741"></a>
-<span class="sourceLineNo">742</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      previous.invalidate();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>    @Override<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    public Cell getKey() {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      byte[] key = new byte[current.keyLength];<a name="line.748"></a>
-<span class="sourceLineNo">749</span>      System.arraycopy(current.keyBuffer, 0, key, 0, current.keyLength);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return new KeyValue.KeyOnlyKeyValue(key);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public ByteBuffer getValueShallowCopy() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      currentBuffer.asSubByteBuffer(current.valueOffset, current.valueLength, tmpPair);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>      ByteBuffer dup = tmpPair.getFirst().duplicate();<a name="line.756"></a>
-<span class="sourceLineNo">757</span>      dup.position(tmpPair.getSecond());<a name="line.757"></a>
-<span class="sourceLineNo">758</span>      dup.limit(tmpPair.getSecond() + current.valueLength);<a name="line.758"></a>
-<span class="sourceLineNo">759</span>      return dup.slice();<a name="line.759"></a>
-<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
-<span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    @Override<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    public Cell getCell() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      return current.toCell();<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    }<a name="line.765"></a>
-<span class="sourceLineNo">766</span><a name="line.766"></a>
-<span class="sourceLineNo">767</span>    @Override<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    public void rewind() {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      currentBuffer.rewind();<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      if (tagCompressionContext != null) {<a name="line.770"></a>
-<span class="sourceLineNo">771</span>        tagCompressionContext.clear();<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      }<a name="line.772"></a>
-<span class="sourceLineNo">773</span>      decodeFirst();<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      previous.invalidate();<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public boolean next() {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      if (!currentBuffer.hasRemaining()) {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        return false;<a name="line.781"></a>
-<span class="sourceLineNo">782</span>      }<a name="line.782"></a>
-<span class="sourceLineNo">783</span>      decodeNext();<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      previous.invalidate();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>      return true;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span><a name="line.788"></a>
-<span class="sourceLineNo">789</span>    protected void decodeTags() {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      current.tagsLength = ByteBuff.readCompressedInt(currentBuffer);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      if (tagCompressionContext != null) {<a name="line.791"></a>
-<span class="sourceLineNo">792</span>        if (current.uncompressTags) {<a name="line.792"></a>
-<span class="sourceLineNo">793</span>          // Tag compression is being used. Uncompress it into tagsBuffer.<a name="line.793"></a>
-<span class="sourceLineNo">794</span>          current.ensureSpaceForTags();<a name="line.794"></a>
-<span class="sourceLineNo">795</span>          try {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>            current.tagsCompressedLength = tagCompressionContext.uncompressTags(currentBuffer,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>                current.tagsBuffer, 0, current.tagsLength);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>          } catch (IOException e) {<a name="line.798"></a>
-<span class="sourceLineNo">799</span>            throw new RuntimeException("Exception while uncompressing tags", e);<a name="line.799"></a>
-<span class="sourceLineNo">800</span>          }<a name="line.800"></a>
-<span class="sourceLineNo">801</span>        } else {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>          currentBuffer.skip(current.tagsCompressedLength);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>          current.uncompressTags = true;// Reset this.<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        }<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        current.tagsOffset = -1;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>      } else {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>        // When tag compression is not used, avoid copying the tags bytes into tagsBuffer.<a name="line.807"></a>
-<span class="sourceLineNo">808</span>        // Just mark the tags offset so the KV buffer can be created later in getKeyValueBuffer().<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        current.tagsOffset = currentBuffer.position();<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        currentBuffer.skip(current.tagsLength);<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      }<a name="line.811"></a>
-<span class="sourceLineNo">812</span>    }<a name="line.812"></a>
-<span class="sourceLineNo">813</span><a name="line.813"></a>
-<span class="sourceLineNo">814</span>    @Override<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    public int seekToKeyInBlock(Cell seekCell, boolean seekBefore) {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      int rowCommonPrefix = 0;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      int familyCommonPrefix = 0;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      int qualCommonPrefix = 0;<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      previous.invalidate();<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      do {<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        int comp;<a name="line.821"></a>
-<span class="sourceLineNo">822</span>        keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.822"></a>
-<span class="sourceLineNo">823</span>        if (current.lastCommonPrefix != 0) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>          // The KV format has row key length also in the byte array. The<a name="line.824"></a>
-<span class="sourceLineNo">825</span>          // common prefix<a name="line.825"></a>
-<span class="sourceLineNo">826</span>          // includes it. So we need to subtract to find out the common prefix<a name="line.826"></a>
-<span class="sourceLineNo">827</span>          // in the<a name="line.827"></a>
-<span class="sourceLineNo">828</span>          // row part alone<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          rowCommonPrefix = Math.min(rowCommonPrefix, current.lastCommonPrefix - 2);<a name="line.829"></a>
-<span class="sourceLineNo">830</span>        }<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        if (current.lastCommonPrefix &lt;= 2) {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>          rowCommonPrefix = 0;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>        }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>        rowCommonPrefix += findCommonPrefixInRowPart(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>        comp = compareCommonRowPrefix(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>        if (comp == 0) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          comp = compareTypeBytes(seekCell, keyOnlyKV);<a name="line.837"></a>
-<span class="sourceLineNo">838</span>          if (comp == 0) {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>            // Subtract the fixed row key length and the family key fixed length<a name="line.839"></a>
-<span class="sourceLineNo">840</span>            familyCommonPrefix = Math.max(<a name="line.840"></a>
-<span class="sourceLineNo">841</span>                0,<a name="line.841"></a>
-<span class="sourceLineNo">842</span>                Math.min(familyCommonPrefix,<a name="line.842"></a>
-<span class="sourceLineNo">843</span>                    current.lastCommonPrefix - (3 + keyOnlyKV.getRowLength())));<a name="line.843"></a>
-<span class="sourceLineNo">844</span>            familyCommonPrefix += findCommonPrefixInFamilyPart(seekCell, keyOnlyKV,<a name="line.844"></a>
-<span class="sourceLineNo">845</span>                familyCommonPrefix);<a name="line.845"></a>
-<span class="sourceLineNo">846</span>            comp = compareCommonFamilyPrefix(seekCell, keyOnlyKV, familyCommonPrefix);<a name="line.846"></a>
-<span class="sourceLineNo">847</span>            if (comp == 0) {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>              // subtract the rowkey fixed length and the family key fixed<a name="line.848"></a>
-<span class="sourceLineNo">849</span>              // length<a name="line.849"></a>
-<span class="sourceLineNo">850</span>              qualCommonPrefix = Math.max(<a name="line.850"></a>
-<span class="sourceLineNo">851</span>                  0,<a name="line.851"></a>
-<span class="sourceLineNo">852</span>                  Math.min(<a name="line.852"></a>
-<span class="sourceLineNo">853</span>                      qualCommonPrefix,<a name="line.853"></a>
-<span class="sourceLineNo">854</span>                      current.lastCommonPrefix<a name="line.854"></a>
-<span class="sourceLineNo">855</span>                          - (3 + keyOnlyKV.getRowLength() + keyOnlyKV.getFamilyLength())));<a name="line.855"></a>
-<span class="sourceLineNo">856</span>              qualCommonPrefix += findCommonPrefixInQualifierPart(seekCell, keyOnlyKV,<a name="line.856"></a>
-<span class="sourceLineNo">857</span>                  qualCommonPrefix);<a name="line.857"></a>
-<span class="sourceLineNo">858</span>              comp = compareCommonQualifierPrefix(seekCell, keyOnlyKV, qualCommonPrefix);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>              if (comp == 0) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>                comp = CellComparator.compareTimestamps(seekCell, keyOnlyKV);<a name="line.860"></a>
-<span class="sourceLineNo">861</span>                if (comp == 0) {<a name="line.861"></a>
-<span class="sourceLineNo">862</span>                  // Compare types. Let the delete types sort ahead of puts;<a name="line.862"></a>
-<span class="sourceLineNo">863</span>                  // i.e. types<a name="line.863"></a>
-<span class="sourceLineNo">864</span>                  // of higher numbers sort before those of lesser numbers.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>                  // Maximum<a name="line.865"></a>
-<span class="sourceLineNo">866</span>                  // (255)<a name="line.866"></a>
-<span class="sourceLineNo">867</span>                  // appears ahead of everything, and minimum (0) appears<a name="line.867"></a>
-<span class="sourceLineNo">868</span>                  // after<a name="line.868"></a>
-<span class="sourceLineNo">869</span>                  // everything.<a name="line.869"></a>
-<span class="sourceLineNo">870</span>                  comp = (0xff &amp; keyOnlyKV.getTypeByte()) - (0xff &amp; seekCell.getTypeByte());<a name="line.870"></a>
-<span class="sourceLineNo">871</span>                }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>              }<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            }<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          }<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>        if (comp == 0) { // exact match<a name="line.876"></a>
-<span class="sourceLineNo">877</span>          if (seekBefore) {<a name="line.877"></a>
-<span class="sourceLineNo">878</span>            if (!previous.isValid()) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>              // The caller (seekBefore) has to ensure that we are not at the<a name="line.879"></a>
-<span class="sourceLineNo">880</span>              // first key in the block.<a name="line.880"></a>
-<span class="sourceLineNo">881</span>              throw new IllegalStateException("Cannot seekBefore if "<a name="line.881"></a>
-<span class="sourceLineNo">882</span>                  + "positioned at the first key in the block: key="<a name="line.882"></a>
-<span class="sourceLineNo">883</span>                  + Bytes.toStringBinary(seekCell.getRowArray()));<a name="line.883"></a>
-<span class="sourceLineNo">884</span>            }<a name="line.884"></a>
-<span class="sourceLineNo">885</span>            moveToPrevious();<a name="line.885"></a>
-<span class="sourceLineNo">886</span>            return 1;<a name="line.886"></a>
-<span class="sourceLineNo">887</span>          }<a name="line.887"></a>
-<span class="sourceLineNo">888</span>          return 0;<a name="line.888"></a>
-<span class="sourceLineNo">889</span>        }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>        if (comp &lt; 0) { // already too large, check previous<a name="line.891"></a>
-<span class="sourceLineNo">892</span>          if (previous.isValid()) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>            moveToPrevious();<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          } else {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>            return HConstants.INDEX_KEY_MAGIC; // using optimized index key<a name="line.895"></a>
-<span class="sourceLineNo">896</span>          }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>          return 1;<a name="line.897"></a>
-<span class="sourceLineNo">898</span>        }<a name="line.898"></a>
-<span class="sourceLineNo">899</span><a name="line.899"></a>
-<span class="sourceLineNo">900</span>        // move to next, if more data is available<a name="line.900"></a>
-<span class="sourceLineNo">901</span>        if (currentBuffer.hasRemaining()) {<a name="line.901"></a>
-<span class="sourceLineNo">902</span>          previous.copyFromNext(current);<a name="line.902"></a>
-<span class="sourceLineNo">903</span>          decodeNext();<a name="line.903"></a>
-<span class="sourceLineNo">904</span>          current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.904"></a>
-<span class="sourceLineNo">905</span>        } else {<a name="line.905"></a>
-<span class="sourceLineNo">906</span>          break;<a name="line.906"></a>
-<span class="sourceLineNo">907</span>        }<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      } while (true);<a name="line.908"></a>
-<span class="sourceLineNo">909</span><a name="line.909"></a>
-<span class="sourceLineNo">910</span>      // we hit the end of the block, not an exact match<a name="line.910"></a>
-<span class="sourceLineNo">911</span>      return 1;<a name="line.911"></a>
-<span class="sourceLineNo">912</span>    }<a name="line.912"></a>
-<span class="sourceLineNo">913</span><a name="line.913"></a>
-<span class="sourceLineNo">914</span>    private int compareTypeBytes(Cell key, Cell right) {<a name="line.914"></a>
-<span class="sourceLineNo">915</span>      if (key.getFamilyLength() + key.getQualifierLength() == 0<a name="line.915"></a>
-<span class="sourceLineNo">916</span>          &amp;&amp; key.getTypeByte() == Type.Minimum.getCode()) {<a name="line.916"></a>
-<span class="sourceLineNo">917</span>        // left is "bigger", i.e. it appears later in the sorted order<a name="line.917"></a>
-<span class="sourceLineNo">918</span>        return 1;<a name="line.918"></a>
-<span class="sourceLineNo">919</span>      }<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      if (right.getFamilyLength() + right.getQualifierLength() == 0<a name="line.920"></a>
-<span class="sourceLineNo">921</span>          &amp;&amp; right.getTypeByte() == Type.Minimum.getCode()) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>        return -1;<a name="line.922"></a>
-<span class="sourceLineNo">923</span>      }<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      return 0;<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span><a name="line.926"></a>
-<span class="sourceLineNo">927</span>    private static int findCommonPrefixInRowPart(Cell left, Cell right, int rowCommonPrefix) {<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      return Bytes.findCommonPrefix(left.getRowArray(), right.getRowArray(), left.getRowLength()<a name="line.928"></a>
-<span class="sourceLineNo">929</span>          - rowCommonPrefix, right.getRowLength() - rowCommonPrefix, left.getRowOffset()<a name="line.929"></a>
-<span class="sourceLineNo">930</span>          + rowCommonPrefix, right.getRowOffset() + rowCommonPrefix);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    }<a name="line.931"></a>
-<span class="sourceLineNo">932</span><a name="line.932"></a>
-<span class="sourceLineNo">933</span>    private static int findCommonPrefixInFamilyPart(Cell left, Cell right, int familyCommonPrefix) {<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      return Bytes<a name="line.934"></a>
-<span class="sourceLineNo">935</span>          .findCommonPrefix(left.getFamilyArray(), right.getFamilyArray(), left.getFamilyLength()<a name="line.935"></a>
-<span class="sourceLineNo">936</span>              - familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix,<a name="line.936"></a>
-<span class="sourceLineNo">937</span>              left.getFamilyOffset() + familyCommonPrefix, right.getFamilyOffset()<a name="line.937"></a>
-<span class="sourceLineNo">938</span>                  + familyCommonPrefix);<a name="line.938"></a>
-<span class="sourceLineNo">939</span>    }<a name="line.939"></a>
-<span class="sourceLineNo">940</span><a name="line.940"></a>
-<span class="sourceLineNo">941</span>    private static int findCommonPrefixInQualifierPart(Cell left, Cell right,<a name="line.941"></a>
-<span class="sourceLineNo">942</span>        int qualifierCommonPrefix) {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      return Bytes.findCommonPrefix(left.getQualifierArray(), right.getQualifierArray(),<a name="line.943"></a>
-<span class="sourceLineNo">944</span>          left.getQualifierLength() - qualifierCommonPrefix, right.getQualifierLength()<a name="line.944"></a>
-<span class="sourceLineNo">945</span>              - qualifierCommonPrefix, left.getQualifierOffset() + qualifierCommonPrefix,<a name="line.945"></a>
-<span class="sourceLineNo">946</span>          right.getQualifierOffset() + qualifierCommonPrefix);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    private void moveToPrevious() {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!previous.isValid()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        throw new IllegalStateException(<a name="line.951"></a>
-<span class="sourceLineNo">952</span>            "Can move back only once, and not when at the first key in the block.");<a name="line.952"></a>
-<span class="sourceLineNo">953</span>      }<a name="line.953"></a>
-<span class="sourceLineNo">954</span><a name="line.954"></a>
-<span class="sourceLineNo">955</span>      STATE tmp = previous;<a name="line.955"></a>
-<span class="sourceLineNo">956</span>      previous = current;<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      current = tmp;<a name="line.957"></a>
-<span class="sourceLineNo">958</span><a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // move after last key value<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      currentBuffer.position(cur

<TRUNCATED>
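
The write() methods in the hunk above serialize the tags length as two raw big-endian bytes rather than a four-byte int; because the length is never negative, the sign bit is not needed (see HBASE-11437). Below is a minimal, self-contained sketch of that two-byte round trip; the class and helper names are illustrative only and are not taken from the HBase sources.

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    public class TagsLengthCodecSketch {

      // Write a non-negative length (< 65536) as two big-endian bytes,
      // matching the "2 bytes tags length" convention in the code above.
      static void writeShortLength(OutputStream out, int length) throws IOException {
        out.write((byte) (0xff & (length >> 8)));
        out.write((byte) (0xff & length));
      }

      // Read the two bytes back as an unsigned 16-bit value.
      static int readShortLength(byte[] buf, int offset) {
        return ((buf[offset] & 0xff) << 8) | (buf[offset + 1] & 0xff);
      }

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        writeShortLength(bos, 300);
        System.out.println(readShortLength(bos.toByteArray(), 0)); // prints 300
      }
    }
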

[18/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
index d8b6ca7..66dbcf3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
@@ -31,12 +31,12 @@
 <span class="sourceLineNo">023</span>import java.nio.ByteBuffer;<a name="line.23"></a>
 <span class="sourceLineNo">024</span><a name="line.24"></a>
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.ByteBufferedCell;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.Cell;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HConstants;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.31"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.Cell;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HConstants;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.31"></a>
 <span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.KeyValue.Type;<a name="line.32"></a>
 <span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.33"></a>
 <span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.34"></a>
@@ -60,1113 +60,1115 @@
 <span class="sourceLineNo">052</span> */<a name="line.52"></a>
 <span class="sourceLineNo">053</span>@InterfaceAudience.Private<a name="line.53"></a>
 <span class="sourceLineNo">054</span>abstract class BufferedDataBlockEncoder implements DataBlockEncoder {<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  private static int INITIAL_KEY_BUFFER_SIZE = 512;<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  @Override<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public ByteBuffer decodeKeyValues(DataInputStream source,<a name="line.59"></a>
-<span class="sourceLineNo">060</span>      HFileBlockDecodingContext blkDecodingCtx) throws IOException {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>      throw new IOException(this.getClass().getName() + " only accepts "<a name="line.62"></a>
-<span class="sourceLineNo">063</span>          + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    }<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>    HFileBlockDefaultDecodingContext decodingCtx =<a name="line.66"></a>
-<span class="sourceLineNo">067</span>        (HFileBlockDefaultDecodingContext) blkDecodingCtx;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    if (decodingCtx.getHFileContext().isIncludesTags()<a name="line.68"></a>
-<span class="sourceLineNo">069</span>        &amp;&amp; decodingCtx.getHFileContext().isCompressTags()) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      if (decodingCtx.getTagCompressionContext() != null) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        // It would add overhead to create the TagCompressionContext again and again for every block<a name="line.71"></a>
-<span class="sourceLineNo">072</span>        // decoding.<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        decodingCtx.getTagCompressionContext().clear();<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      } else {<a name="line.74"></a>
-<span class="sourceLineNo">075</span>        try {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>          TagCompressionContext tagCompressionContext = new TagCompressionContext(<a name="line.76"></a>
-<span class="sourceLineNo">077</span>              LRUDictionary.class, Byte.MAX_VALUE);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>          decodingCtx.setTagCompressionContext(tagCompressionContext);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>        } catch (Exception e) {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>          throw new IOException("Failed to initialize TagCompressionContext", e);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>        }<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      }<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    }<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    return internalDecodeKeyValues(source, 0, 0, decodingCtx);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  /********************* common prefixes *************************/<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  // Having this as static is fine but if META is having DBE then we should<a name="line.88"></a>
-<span class="sourceLineNo">088</span>  // Having this as static is fine, but if META has DBE enabled then we should<a name="line.88"></a>
-<span class="sourceLineNo">090</span>  public static int compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    return Bytes.compareTo(left.getRowArray(), left.getRowOffset() + rowCommonPrefix,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>        left.getRowLength() - rowCommonPrefix, right.getRowArray(), right.getRowOffset()<a name="line.92"></a>
-<span class="sourceLineNo">093</span>            + rowCommonPrefix, right.getRowLength() - rowCommonPrefix);<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  public static int compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset() + familyCommonPrefix,<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        left.getFamilyLength() - familyCommonPrefix, right.getFamilyArray(),<a name="line.98"></a>
-<span class="sourceLineNo">099</span>        right.getFamilyOffset() + familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
-<span class="sourceLineNo">101</span><a name="line.101"></a>
-<span class="sourceLineNo">102</span>  public static int compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset() + qualCommonPrefix,<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        left.getQualifierLength() - qualCommonPrefix, right.getQualifierArray(),<a name="line.104"></a>
-<span class="sourceLineNo">105</span>        right.getQualifierOffset() + qualCommonPrefix, right.getQualifierLength()<a name="line.105"></a>
-<span class="sourceLineNo">106</span>            - qualCommonPrefix);<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  }<a name="line.107"></a>
-<span class="sourceLineNo">108</span><a name="line.108"></a>
-<span class="sourceLineNo">109</span>  protected static class SeekerState {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    protected ByteBuff currentBuffer;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    protected TagCompressionContext tagCompressionContext;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    protected int valueOffset = -1;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    protected int keyLength;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    protected int valueLength;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    protected int lastCommonPrefix;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    protected int tagsLength = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    protected int tagsOffset = -1;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    protected int tagsCompressedLength = 0;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    protected boolean uncompressTags = true;<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>    /** We need to store a copy of the key. */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    protected byte[] keyBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    protected byte[] tagsBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>    protected long memstoreTS;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    protected int nextKvOffset;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    protected KeyValue.KeyOnlyKeyValue currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    // many object creations.<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    private final ObjectIntPair&lt;ByteBuffer&gt; tmpPair;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    private final boolean includeTags;<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public SeekerState(ObjectIntPair&lt;ByteBuffer&gt; tmpPair, boolean includeTags) {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      this.tmpPair = tmpPair;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      this.includeTags = includeTags;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>    protected boolean isValid() {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      return valueOffset != -1;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    protected void invalidate() {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      valueOffset = -1;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>      tagsCompressedLength = 0;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      uncompressTags = true;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      currentBuffer = null;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    protected void ensureSpaceForKey() {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      if (keyLength &gt; keyBuffer.length) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        // rare case, but we need to handle arbitrary length of key<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        int newKeyBufferLength = Math.max(keyBuffer.length, 1) * 2;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>        while (keyLength &gt; newKeyBufferLength) {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>          newKeyBufferLength *= 2;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        }<a name="line.156"></a>
-<span class="sourceLineNo">157</span>        byte[] newKeyBuffer = new byte[newKeyBufferLength];<a name="line.157"></a>
-<span class="sourceLineNo">158</span>        System.arraycopy(keyBuffer, 0, newKeyBuffer, 0, keyBuffer.length);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        keyBuffer = newKeyBuffer;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      }<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    }<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>    protected void ensureSpaceForTags() {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      if (tagsLength &gt; tagsBuffer.length) {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        // rare case, but we need to handle arbitrary length of tags<a name="line.165"></a>
-<span class="sourceLineNo">166</span>        int newTagsBufferLength = Math.max(tagsBuffer.length, 1) * 2;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        while (tagsLength &gt; newTagsBufferLength) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>          newTagsBufferLength *= 2;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>        }<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        byte[] newTagsBuffer = new byte[newTagsBufferLength];<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        System.arraycopy(tagsBuffer, 0, newTagsBuffer, 0, tagsBuffer.length);<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        tagsBuffer = newTagsBuffer;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    }<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>    protected void setKey(byte[] keyBuffer, long memTS) {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      currentKey.setKey(keyBuffer, 0, keyLength);<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      memstoreTS = memTS;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    }<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>    /**<a name="line.181"></a>
-<span class="sourceLineNo">182</span>     * Copy the state from the next one into this instance (the previous state<a name="line.182"></a>
-<span class="sourceLineNo">183</span>     * placeholder). Used to save the previous state when we are advancing the<a name="line.183"></a>
-<span class="sourceLineNo">184</span>     * seeker to the next key/value.<a name="line.184"></a>
-<span class="sourceLineNo">185</span>     */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    protected void copyFromNext(SeekerState nextState) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      if (keyBuffer.length != nextState.keyBuffer.length) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>        keyBuffer = nextState.keyBuffer.clone();<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      } else if (!isValid()) {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        // Note: we can only call isValid before we override our state, so this<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        // comes before all the assignments at the end of this method.<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        System.arraycopy(nextState.keyBuffer, 0, keyBuffer, 0,<a name="line.192"></a>
-<span class="sourceLineNo">193</span>             nextState.keyLength);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      } else {<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        // don't copy the common prefix between this key and the previous one<a name="line.195"></a>
-<span class="sourceLineNo">196</span>        System.arraycopy(nextState.keyBuffer, nextState.lastCommonPrefix,<a name="line.196"></a>
-<span class="sourceLineNo">197</span>            keyBuffer, nextState.lastCommonPrefix, nextState.keyLength<a name="line.197"></a>
-<span class="sourceLineNo">198</span>                - nextState.lastCommonPrefix);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      }<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      currentKey = nextState.currentKey;<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>      valueOffset = nextState.valueOffset;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      keyLength = nextState.keyLength;<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      valueLength = nextState.valueLength;<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      lastCommonPrefix = nextState.lastCommonPrefix;<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      nextKvOffset = nextState.nextKvOffset;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      memstoreTS = nextState.memstoreTS;<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      currentBuffer = nextState.currentBuffer;<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      tagsOffset = nextState.tagsOffset;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      tagsLength = nextState.tagsLength;<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (nextState.tagCompressionContext != null) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        tagCompressionContext = nextState.tagCompressionContext;<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    public Cell toCell() {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>      // Buffer backing the value and tags part from the HFileBlock's buffer<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      // When tag compression in use, this will be only the value bytes area.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      ByteBuffer valAndTagsBuffer;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      int vOffset;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      int valAndTagsLength = this.valueLength;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>      int tagsLenSerializationSize = 0;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      if (this.includeTags &amp;&amp; this.tagCompressionContext == null) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        // Include the tags part also. This will be the tags bytes + 2 bytes of for storing tags<a name="line.224"></a>
-<span class="sourceLineNo">225</span>        // length<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        tagsLenSerializationSize = this.tagsOffset - (this.valueOffset + this.valueLength);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        valAndTagsLength += tagsLenSerializationSize + this.tagsLength;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      this.currentBuffer.asSubByteBuffer(this.valueOffset, valAndTagsLength, this.tmpPair);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      valAndTagsBuffer = this.tmpPair.getFirst();<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      vOffset = this.tmpPair.getSecond();// This is the offset to value part in the BB<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      if (valAndTagsBuffer.hasArray()) {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        return toOnheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      } else {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        return toOffheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>    private Cell toOnheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>        int tagsLenSerializationSize) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      byte[] tagsArray = HConstants.EMPTY_BYTE_ARRAY;<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      int tOffset = 0;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      if (this.includeTags) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        if (this.tagCompressionContext == null) {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          tagsArray = valAndTagsBuffer.array();<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          tOffset = valAndTagsBuffer.arrayOffset() + vOffset + this.valueLength<a name="line.246"></a>
-<span class="sourceLineNo">247</span>              + tagsLenSerializationSize;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        } else {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          tagsArray = Bytes.copy(tagsBuffer, 0, this.tagsLength);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          tOffset = 0;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      return new OnheapDecodedCell(Bytes.copy(keyBuffer, 0, this.keyLength),<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.255"></a>
-<span class="sourceLineNo">256</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer.array(),<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          valAndTagsBuffer.arrayOffset() + vOffset, this.valueLength, memstoreTS, tagsArray,<a name="line.257"></a>
-<span class="sourceLineNo">258</span>          tOffset, this.tagsLength);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>    private Cell toOffheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.261"></a>
-<span class="sourceLineNo">262</span>        int tagsLenSerializationSize) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      ByteBuffer tagsBuf =  HConstants.EMPTY_BYTE_BUFFER;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      int tOffset = 0;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      if (this.includeTags) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        if (this.tagCompressionContext == null) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          tagsBuf = valAndTagsBuffer;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>          tOffset = vOffset + this.valueLength + tagsLenSerializationSize;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        } else {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          tagsBuf = ByteBuffer.wrap(Bytes.copy(tagsBuffer, 0, this.tagsLength));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          tOffset = 0;<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return new OffheapDecodedCell(ByteBuffer.wrap(Bytes.copy(keyBuffer, 0, this.keyLength)),<a name="line.274"></a>
-<span class="sourceLineNo">275</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.276"></a>
-<span class="sourceLineNo">277</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer, vOffset,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>          this.valueLength, memstoreTS, tagsBuf, tOffset, this.tagsLength);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    }<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
-<span class="sourceLineNo">283</span>   * Copies only the key part of the keybuffer by doing a deep copy and passes the<a name="line.283"></a>
-<span class="sourceLineNo">284</span>   * seeker state members for taking a clone.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Note that the value byte[] part is still pointing to the currentBuffer and<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * represented by the valueOffset and valueLength<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  // We return this as a Cell to the upper layers of read flow and might try setting a new SeqId<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  // there. So this has to be an instance of SettableSequenceId.<a name="line.289"></a>
-<span class="sourceLineNo">290</span>  protected static class OnheapDecodedCell implements Cell, HeapSize, SettableSequenceId,<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      Streamable {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.293"></a>
-<span class="sourceLineNo">294</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.ARRAY));<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    private byte[] keyOnlyBuffer;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    private short rowLength;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    private int familyOffset;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    private byte familyLength;<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    private int qualifierOffset;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    private int qualifierLength;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    private long timestamp;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    private byte typeByte;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    private byte[] valueBuffer;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    private int valueOffset;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    private int valueLength;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    private byte[] tagsBuffer;<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    private int tagsOffset;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    private int tagsLength;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    private long seqId;<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>    protected OnheapDecodedCell(byte[] keyBuffer, short rowLength, int familyOffset,<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.312"></a>
-<span class="sourceLineNo">313</span>        byte[] valueBuffer, int valueOffset, int valueLen, long seqId, byte[] tagsBuffer,<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        int tagsOffset, int tagsLength) {<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      this.keyOnlyBuffer = keyBuffer;<a name="line.315"></a>
-<span class="sourceLineNo">316</span>      this.rowLength = rowLength;<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      this.familyOffset = familyOffset;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.familyLength = familyLength;<a name="line.318"></a>
-<span class="sourceLineNo">319</span>      this.qualifierOffset = qualOffset;<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.qualifierLength = qualLength;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>      this.timestamp = timeStamp;<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      this.typeByte = typeByte;<a name="line.322"></a>
-<span class="sourceLineNo">323</span>      this.valueBuffer = valueBuffer;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      this.valueOffset = valueOffset;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      this.valueLength = valueLen;<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      this.tagsBuffer = tagsBuffer;<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      this.tagsOffset = tagsOffset;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      this.tagsLength = tagsLength;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      setSequenceId(seqId);<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>    @Override<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    public byte[] getRowArray() {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>      return keyOnlyBuffer;<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    }<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>    @Override<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    public byte[] getFamilyArray() {<a name="line.338"></a>
-<span class="sourceLineNo">339</span>      return keyOnlyBuffer;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    }<a name="line.340"></a>
-<span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>    @Override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    public byte[] getQualifierArray() {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>      return keyOnlyBuffer;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
-<span class="sourceLineNo">346</span><a name="line.346"></a>
-<span class="sourceLineNo">347</span>    @Override<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    public int getRowOffset() {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      return Bytes.SIZEOF_SHORT;<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>    @Override<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    public short getRowLength() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      return rowLength;<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>    @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    public int getFamilyOffset() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      return familyOffset;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>    @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    public byte getFamilyLength() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      return familyLength;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>    @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    public int getQualifierOffset() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      return qualifierOffset;<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    }<a name="line.370"></a>
-<span class="sourceLineNo">371</span><a name="line.371"></a>
-<span class="sourceLineNo">372</span>    @Override<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    public int getQualifierLength() {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      return qualifierLength;<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>    @Override<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    public long getTimestamp() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      return timestamp;<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>    @Override<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    public byte getTypeByte() {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return typeByte;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>    @Override<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    public long getSequenceId() {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      return seqId;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>    @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    public byte[] getValueArray() {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      return this.valueBuffer;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>    @Override<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    public int getValueOffset() {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      return valueOffset;<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>    @Override<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    public int getValueLength() {<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      return valueLength;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    @Override<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    public byte[] getTagsArray() {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      return this.tagsBuffer;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    }<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    @Override<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    public int getTagsOffset() {<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      return this.tagsOffset;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    }<a name="line.415"></a>
-<span class="sourceLineNo">416</span><a name="line.416"></a>
-<span class="sourceLineNo">417</span>    @Override<a name="line.417"></a>
-<span class="sourceLineNo">418</span>    public int getTagsLength() {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      return tagsLength;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    @Override<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public String toString() {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          + getValueLength() + "/seqid=" + seqId;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    @Override<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    public void setSequenceId(long seqId) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      this.seqId = seqId;<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    @Override<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    public long heapSize() {<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>    @Override<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    public int write(OutputStream out) throws IOException {<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      return write(out, true);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    @Override<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.445"></a>
-<span class="sourceLineNo">446</span>          tagsLength, withTags);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      ByteBufferUtils.putInt(out, keyOnlyBuffer.length);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      // Write key<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      out.write(keyOnlyBuffer);<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      // Write value<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      out.write(this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      if (withTags) {<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        // 2 bytes tags length followed by tags bytes<a name="line.455"></a>
-<span class="sourceLineNo">456</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        out.write(this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>  }<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>  protected static class OffheapDecodedCell extends ByteBufferedCell implements HeapSize,<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      SettableSequenceId, Streamable {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.468"></a>
-<span class="sourceLineNo">469</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.BYTE_BUFFER));<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    private ByteBuffer keyBuffer;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    private short rowLength;<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    private int familyOffset;<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    private byte familyLength;<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    private int qualifierOffset;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    private int qualifierLength;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    private long timestamp;<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    private byte typeByte;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    private ByteBuffer valueBuffer;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    private int valueOffset;<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    private int valueLength;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private ByteBuffer tagsBuffer;<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    private int tagsOffset;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    private int tagsLength;<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    private long seqId;<a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>    protected OffheapDecodedCell(ByteBuffer keyBuffer, short rowLength, int familyOffset,<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.488"></a>
-<span class="sourceLineNo">489</span>        ByteBuffer valueBuffer, int valueOffset, int valueLen, long seqId, ByteBuffer tagsBuffer,<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        int tagsOffset, int tagsLength) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      // The keyBuffer is always onheap<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      assert keyBuffer.hasArray();<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      assert keyBuffer.arrayOffset() == 0;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      this.keyBuffer = keyBuffer;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      this.rowLength = rowLength;<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      this.familyOffset = familyOffset;<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      this.familyLength = familyLength;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      this.qualifierOffset = qualOffset;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      this.qualifierLength = qualLength;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      this.timestamp = timeStamp;<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      this.typeByte = typeByte;<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      this.valueBuffer = valueBuffer;<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      this.valueOffset = valueOffset;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      this.valueLength = valueLen;<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      this.tagsBuffer = tagsBuffer;<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      this.tagsOffset = tagsOffset;<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      this.tagsLength = tagsLength;<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      setSequenceId(seqId);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    @Override<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    public byte[] getRowArray() {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return this.keyBuffer.array();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    @Override<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    public int getRowOffset() {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      return getRowPosition();<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>    @Override<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    public short getRowLength() {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      return this.rowLength;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
-<span class="sourceLineNo">525</span><a name="line.525"></a>
-<span class="sourceLineNo">526</span>    @Override<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    public byte[] getFamilyArray() {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return this.keyBuffer.array();<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span><a name="line.530"></a>
-<span class="sourceLineNo">531</span>    @Override<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    public int getFamilyOffset() {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      return getFamilyPosition();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>    @Override<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    public byte getFamilyLength() {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      return this.familyLength;<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    }<a name="line.539"></a>
-<span class="sourceLineNo">540</span><a name="line.540"></a>
-<span class="sourceLineNo">541</span>    @Override<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    public byte[] getQualifierArray() {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      return this.keyBuffer.array();<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    }<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    @Override<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    public int getQualifierOffset() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      return getQualifierPosition();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    @Override<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    public int getQualifierLength() {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      return this.qualifierLength;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    }<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    @Override<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    public long getTimestamp() {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      return this.timestamp;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    @Override<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    public byte getTypeByte() {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      return this.typeByte;<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>    @Override<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    public long getSequenceId() {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      return this.seqId;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    @Override<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    public byte[] getValueArray() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      return CellUtil.cloneValue(this);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>    @Override<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    public int getValueOffset() {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      return 0;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
-<span class="sourceLineNo">580</span><a name="line.580"></a>
-<span class="sourceLineNo">581</span>    @Override<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    public int getValueLength() {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      return this.valueLength;<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>    @Override<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    public byte[] getTagsArray() {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      return CellUtil.cloneTags(this);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    }<a name="line.589"></a>
-<span class="sourceLineNo">590</span><a name="line.590"></a>
-<span class="sourceLineNo">591</span>    @Override<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    public int getTagsOffset() {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      return 0;<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    }<a name="line.594"></a>
-<span class="sourceLineNo">595</span><a name="line.595"></a>
-<span class="sourceLineNo">596</span>    @Override<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    public int getTagsLength() {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      return this.tagsLength;<a name="line.598"></a>
-<span class="sourceLineNo">599</span>    }<a name="line.599"></a>
-<span class="sourceLineNo">600</span><a name="line.600"></a>
-<span class="sourceLineNo">601</span>    @Override<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    public ByteBuffer getRowByteBuffer() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      return this.keyBuffer;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    }<a name="line.604"></a>
-<span class="sourceLineNo">605</span><a name="line.605"></a>
-<span class="sourceLineNo">606</span>    @Override<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    public int getRowPosition() {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      return Bytes.SIZEOF_SHORT;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    @Override<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    public ByteBuffer getFamilyByteBuffer() {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      return this.keyBuffer;<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>    @Override<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    public int getFamilyPosition() {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      return this.familyOffset;<a name="line.618"></a>
-<span class="sourceLineNo">619</span>    }<a name="line.619"></a>
-<span class="sourceLineNo">620</span><a name="line.620"></a>
-<span class="sourceLineNo">621</span>    @Override<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    public ByteBuffer getQualifierByteBuffer() {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      return this.keyBuffer;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    }<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>    @Override<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    public int getQualifierPosition() {<a name="line.627"></a>
-<span class="sourceLineNo">628</span>      return this.qualifierOffset;<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    @Override<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    public ByteBuffer getValueByteBuffer() {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      return this.valueBuffer;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
-<span class="sourceLineNo">635</span><a name="line.635"></a>
-<span class="sourceLineNo">636</span>    @Override<a name="line.636"></a>
-<span class="sourceLineNo">637</span>    public int getValuePosition() {<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this.valueOffset;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    @Override<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    public ByteBuffer getTagsByteBuffer() {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      return this.tagsBuffer;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>    }<a name="line.644"></a>
-<span class="sourceLineNo">645</span><a name="line.645"></a>
-<span class="sourceLineNo">646</span>    @Override<a name="line.646"></a>
-<span class="sourceLineNo">647</span>    public int getTagsPosition() {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      return this.tagsOffset;<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
-<span class="sourceLineNo">650</span><a name="line.650"></a>
-<span class="sourceLineNo">651</span>    @Override<a name="line.651"></a>
-<span class="sourceLineNo">652</span>    public long heapSize() {<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>    @Override<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    public void setSequenceId(long seqId) {<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      this.seqId = seqId;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span><a name="line.660"></a>
-<span class="sourceLineNo">661</span>    @Override<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    public int write(OutputStream out) throws IOException {<a name="line.662"></a>
-<span class="sourceLineNo">663</span>      return write(out, true);<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    }<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    @Override<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          tagsLength, withTags);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      ByteBufferUtils.putInt(out, keyBuffer.capacity());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      // Write key<a name="line.673"></a>
-<span class="sourceLineNo">674</span>      out.write(keyBuffer.array());<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      // Write value<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      ByteBufferUtils.copyBufferToStream(out, this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      if (withTags) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>        // 2 bytes tags length followed by tags bytes<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.681"></a>
-<span class="sourceLineNo">682</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.682"></a>
-<span class="sourceLineNo">683</span>        ByteBufferUtils.copyBufferToStream(out, this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  protected abstract static class<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      BufferedEncodedSeeker&lt;STATE extends SeekerState&gt;<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      implements EncodedSeeker {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    protected HFileBlockDecodingContext decodingCtx;<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    protected final CellComparator comparator;<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    protected ByteBuff currentBuffer;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    protected TagCompressionContext tagCompressionContext = null;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    protected  KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    // many object creations.<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    protected final ObjectIntPair&lt;ByteBuffer&gt; tmpPair = new ObjectIntPair&lt;ByteBuffer&gt;();<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    protected STATE current, previous;<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>    public BufferedEncodedSeeker(CellComparator comparator,<a name="line.702"></a>
-<span class="sourceLineNo">703</span>        HFileBlockDecodingContext decodingCtx) {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      this.comparator = comparator;<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.decodingCtx = decodingCtx;<a name="line.705"></a>
-<span class="sourceLineNo">706</span>      if (decodingCtx.getHFileContext().isCompressTags()) {<a name="line.706"></a>
-<span class="sourceLineNo">707</span>        try {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>          tagCompressionContext = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>        } catch (Exception e) {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>          throw new RuntimeException("Failed to initialize TagCompressionContext", e);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>        }<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      }<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      current = createSeekerState(); // always valid<a name="line.713"></a>
-<span class="sourceLineNo">714</span>      previous = createSeekerState(); // may not be valid<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    }<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>    protected boolean includesMvcc() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>      return this.decodingCtx.getHFileContext().isIncludesMvcc();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    }<a name="line.719"></a>
-<span class="sourceLineNo">720</span><a name="line.720"></a>
-<span class="sourceLineNo">721</span>    protected boolean includesTags() {<a name="line.721"></a>
-<span class="sourceLineNo">722</span>      return this.decodingCtx.getHFileContext().isIncludesTags();<a name="line.722"></a>
-<span class="sourceLineNo">723</span>    }<a name="line.723"></a>
-<span class="sourceLineNo">724</span><a name="line.724"></a>
-<span class="sourceLineNo">725</span>    @Override<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    public int compareKey(CellComparator comparator, Cell key) {<a name="line.726"></a>
-<span class="sourceLineNo">727</span>      keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return comparator.compareKeyIgnoresMvcc(key, keyOnlyKV);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    @Override<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    public void setCurrentBuffer(ByteBuff buffer) {<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      if (this.tagCompressionContext != null) {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        this.tagCompressionContext.clear();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      }<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      currentBuffer = buffer;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>      current.currentBuffer = currentBuffer;<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      if(tagCompressionContext != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>        current.tagCompressionContext = tagCompressionContext;<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      }<a name="line.740"></a>
-<span class="sourceLineNo">741</span>      decodeFirst();<a name="line.741"></a>
-<span class="sourceLineNo">742</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      previous.invalidate();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>    @Override<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    public Cell getKey() {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      byte[] key = new byte[current.keyLength];<a name="line.748"></a>
-<span class="sourceLineNo">749</span>      System.arraycopy(current.keyBuffer, 0, key, 0, current.keyLength);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return new KeyValue.KeyOnlyKeyValue(key);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public ByteBuffer getValueShallowCopy() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      currentBuffer.asSubByteBuffer(current.valueOffset, current.valueLength, tmpPair);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>      ByteBuffer dup = tmpPair.getFirst().duplicate();<a name="line.756"></a>
-<span class="sourceLineNo">757</span>      dup.position(tmpPair.getSecond());<a name="line.757"></a>
-<span class="sourceLineNo">758</span>      dup.limit(tmpPair.getSecond() + current.valueLength);<a name="line.758"></a>
-<span class="sourceLineNo">759</span>      return dup.slice();<a name="line.759"></a>
-<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
-<span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    @Override<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    public Cell getCell() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      return current.toCell();<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    }<a name="line.765"></a>
-<span class="sourceLineNo">766</span><a name="line.766"></a>
-<span class="sourceLineNo">767</span>    @Override<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    public void rewind() {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      currentBuffer.rewind();<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      if (tagCompressionContext != null) {<a name="line.770"></a>
-<span class="sourceLineNo">771</span>        tagCompressionContext.clear();<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      }<a name="line.772"></a>
-<span class="sourceLineNo">773</span>      decodeFirst();<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      previous.invalidate();<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public boolean next() {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      if (!currentBuffer.hasRemaining()) {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        return false;<a name="line.781"></a>
-<span class="sourceLineNo">782</span>      }<a name="line.782"></a>
-<span class="sourceLineNo">783</span>      decodeNext();<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      previous.invalidate();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>      return true;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span><a name="line.788"></a>
-<span class="sourceLineNo">789</span>    protected void decodeTags() {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      current.tagsLength = ByteBuff.readCompressedInt(currentBuffer);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      if (tagCompressionContext != null) {<a name="line.791"></a>
-<span class="sourceLineNo">792</span>        if (current.uncompressTags) {<a name="line.792"></a>
-<span class="sourceLineNo">793</span>          // Tag compression is been used. uncompress it into tagsBuffer<a name="line.793"></a>
-<span class="sourceLineNo">794</span>          current.ensureSpaceForTags();<a name="line.794"></a>
-<span class="sourceLineNo">795</span>          try {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>            current.tagsCompressedLength = tagCompressionContext.uncompressTags(currentBuffer,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>                current.tagsBuffer, 0, current.tagsLength);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>          } catch (IOException e) {<a name="line.798"></a>
-<span class="sourceLineNo">799</span>            throw new RuntimeException("Exception while uncompressing tags", e);<a name="line.799"></a>
-<span class="sourceLineNo">800</span>          }<a name="line.800"></a>
-<span class="sourceLineNo">801</span>        } else {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>          currentBuffer.skip(current.tagsCompressedLength);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>          current.uncompressTags = true;// Reset this.<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        }<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        current.tagsOffset = -1;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>      } else {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>        // When tag compress is not used, let us not do copying of tags bytes into tagsBuffer.<a name="line.807"></a>
-<span class="sourceLineNo">808</span>        // Just mark the tags Offset so as to create the KV buffer later in getKeyValueBuffer()<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        current.tagsOffset = currentBuffer.position();<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        currentBuffer.skip(current.tagsLength);<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      }<a name="line.811"></a>
-<span class="sourceLineNo">812</span>    }<a name="line.812"></a>
-<span class="sourceLineNo">813</span><a name="line.813"></a>
-<span class="sourceLineNo">814</span>    @Override<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    public int seekToKeyInBlock(Cell seekCell, boolean seekBefore) {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      int rowCommonPrefix = 0;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      int familyCommonPrefix = 0;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      int qualCommonPrefix = 0;<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      previous.invalidate();<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      do {<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        int comp;<a name="line.821"></a>
-<span class="sourceLineNo">822</span>        keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.822"></a>
-<span class="sourceLineNo">823</span>        if (current.lastCommonPrefix != 0) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>          // The KV format has row key length also in the byte array. The<a name="line.824"></a>
-<span class="sourceLineNo">825</span>          // common prefix<a name="line.825"></a>
-<span class="sourceLineNo">826</span>          // includes it. So we need to subtract to find out the common prefix<a name="line.826"></a>
-<span class="sourceLineNo">827</span>          // in the<a name="line.827"></a>
-<span class="sourceLineNo">828</span>          // row part alone<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          rowCommonPrefix = Math.min(rowCommonPrefix, current.lastCommonPrefix - 2);<a name="line.829"></a>
-<span class="sourceLineNo">830</span>        }<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        if (current.lastCommonPrefix &lt;= 2) {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>          rowCommonPrefix = 0;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>        }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>        rowCommonPrefix += findCommonPrefixInRowPart(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>        comp = compareCommonRowPrefix(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>        if (comp == 0) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          comp = compareTypeBytes(seekCell, keyOnlyKV);<a name="line.837"></a>
-<span class="sourceLineNo">838</span>          if (comp == 0) {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>            // Subtract the fixed row key length and the family key fixed length<a name="line.839"></a>
-<span class="sourceLineNo">840</span>            familyCommonPrefix = Math.max(<a name="line.840"></a>
-<span class="sourceLineNo">841</span>                0,<a name="line.841"></a>
-<span class="sourceLineNo">842</span>                Math.min(familyCommonPrefix,<a name="line.842"></a>
-<span class="sourceLineNo">843</span>                    current.lastCommonPrefix - (3 + keyOnlyKV.getRowLength())));<a name="line.843"></a>
-<span class="sourceLineNo">844</span>            familyCommonPrefix += findCommonPrefixInFamilyPart(seekCell, keyOnlyKV,<a name="line.844"></a>
-<span class="sourceLineNo">845</span>                familyCommonPrefix);<a name="line.845"></a>
-<span class="sourceLineNo">846</span>            comp = compareCommonFamilyPrefix(seekCell, keyOnlyKV, familyCommonPrefix);<a name="line.846"></a>
-<span class="sourceLineNo">847</span>            if (comp == 0) {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>              // subtract the rowkey fixed length and the family key fixed<a name="line.848"></a>
-<span class="sourceLineNo">849</span>              // length<a name="line.849"></a>
-<span class="sourceLineNo">850</span>              qualCommonPrefix = Math.max(<a name="line.850"></a>
-<span class="sourceLineNo">851</span>                  0,<a name="line.851"></a>
-<span class="sourceLineNo">852</span>                  Math.min(<a name="line.852"></a>
-<span class="sourceLineNo">853</span>                      qualCommonPrefix,<a name="line.853"></a>
-<span class="sourceLineNo">854</span>                      current.lastCommonPrefix<a name="line.854"></a>
-<span class="sourceLineNo">855</span>                          - (3 + keyOnlyKV.getRowLength() + keyOnlyKV.getFamilyLength())));<a name="line.855"></a>
-<span class="sourceLineNo">856</span>              qualCommonPrefix += findCommonPrefixInQualifierPart(seekCell, keyOnlyKV,<a name="line.856"></a>
-<span class="sourceLineNo">857</span>                  qualCommonPrefix);<a name="line.857"></a>
-<span class="sourceLineNo">858</span>              comp = compareCommonQualifierPrefix(seekCell, keyOnlyKV, qualCommonPrefix);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>              if (comp == 0) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>                comp = CellComparator.compareTimestamps(seekCell, keyOnlyKV);<a name="line.860"></a>
-<span class="sourceLineNo">861</span>                if (comp == 0) {<a name="line.861"></a>
-<span class="sourceLineNo">862</span>                  // Compare types. Let the delete types sort ahead of puts;<a name="line.862"></a>
-<span class="sourceLineNo">863</span>                  // i.e. types<a name="line.863"></a>
-<span class="sourceLineNo">864</span>                  // of higher numbers sort before those of lesser numbers.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>                  // Maximum<a name="line.865"></a>
-<span class="sourceLineNo">866</span>                  // (255)<a name="line.866"></a>
-<span class="sourceLineNo">867</span>                  // appears ahead of everything, and minimum (0) appears<a name="line.867"></a>
-<span class="sourceLineNo">868</span>                  // after<a name="line.868"></a>
-<span class="sourceLineNo">869</span>                  // everything.<a name="line.869"></a>
-<span class="sourceLineNo">870</span>                  comp = (0xff &amp; keyOnlyKV.getTypeByte()) - (0xff &amp; seekCell.getTypeByte());<a name="line.870"></a>
-<span class="sourceLineNo">871</span>                }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>              }<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            }<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          }<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>        if (comp == 0) { // exact match<a name="line.876"></a>
-<span class="sourceLineNo">877</span>          if (seekBefore) {<a name="line.877"></a>
-<span class="sourceLineNo">878</span>            if (!previous.isValid()) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>              // The caller (seekBefore) has to ensure that we are not at the<a name="line.879"></a>
-<span class="sourceLineNo">880</span>              // first key in the block.<a name="line.880"></a>
-<span class="sourceLineNo">881</span>              throw new IllegalStateException("Cannot seekBefore if "<a name="line.881"></a>
-<span class="sourceLineNo">882</span>                  + "positioned at the first key in the block: key="<a name="line.882"></a>
-<span class="sourceLineNo">883</span>                  + Bytes.toStringBinary(seekCell.getRowArray()));<a name="line.883"></a>
-<span class="sourceLineNo">884</span>            }<a name="line.884"></a>
-<span class="sourceLineNo">885</span>            moveToPrevious();<a name="line.885"></a>
-<span class="sourceLineNo">886</span>            return 1;<a name="line.886"></a>
-<span class="sourceLineNo">887</span>          }<a name="line.887"></a>
-<span class="sourceLineNo">888</span>          return 0;<a name="line.888"></a>
-<span class="sourceLineNo">889</span>        }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>        if (comp &lt; 0) { // already too large, check previous<a name="line.891"></a>
-<span class="sourceLineNo">892</span>          if (previous.isValid()) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>            moveToPrevious();<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          } else {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>            return HConstants.INDEX_KEY_MAGIC; // using optimized index key<a name="line.895"></a>
-<span class="sourceLineNo">896</span>          }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>          return 1;<a name="line.897"></a>
-<span class="sourceLineNo">898</span>        }<a name="line.898"></a>
-<span class="sourceLineNo">899</span><a name="line.899"></a>
-<span class="sourceLineNo">900</span>        // move to next, if more data is available<a name="line.900"></a>
-<span class="sourceLineNo">901</span>        if (currentBuffer.hasRemaining()) {<a name="line.901"></a>
-<span class="sourceLineNo">902</span>          previous.copyFromNext(current);<a name="line.902"></a>
-<span class="sourceLineNo">903</span>          decodeNext();<a name="line.903"></a>
-<span class="sourceLineNo">904</span>          current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.904"></a>
-<span class="sourceLineNo">905</span>        } else {<a name="line.905"></a>
-<span class="sourceLineNo">906</span>          break;<a name="line.906"></a>
-<span class="sourceLineNo">907</span>        }<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      } while (true);<a name="line.908"></a>
-<span class="sourceLineNo">909</span><a name="line.909"></a>
-<span class="sourceLineNo">910</span>      // we hit the end of the block, not an exact match<a name="line.910"></a>
-<span class="sourceLineNo">911</span>      return 1;<a name="line.911"></a>
-<span class="sourceLineNo">912</span>    }<a name="line.912"></a>
-<span class="sourceLineNo">913</span><a name="line.913"></a>
-<span class="sourceLineNo">914</span>    private int compareTypeBytes(Cell key, Cell right) {<a name="line.914"></a>
-<span class="sourceLineNo">915</span>      if (key.getFamilyLength() + key.getQualifierLength() == 0<a name="line.915"></a>
-<span class="sourceLineNo">916</span>          &amp;&amp; key.getTypeByte() == Type.Minimum.getCode()) {<a name="line.916"></a>
-<span class="sourceLineNo">917</span>        // left is "bigger", i.e. it appears later in the sorted order<a name="line.917"></a>
-<span class="sourceLineNo">918</span>        return 1;<a name="line.918"></a>
-<span class="sourceLineNo">919</span>      }<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      if (right.getFamilyLength() + right.getQualifierLength() == 0<a name="line.920"></a>
-<span class="sourceLineNo">921</span>          &amp;&amp; right.getTypeByte() == Type.Minimum.getCode()) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>        return -1;<a name="line.922"></a>
-<span class="sourceLineNo">923</span>      }<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      return 0;<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span><a name="line.926"></a>
-<span class="sourceLineNo">927</span>    private static int findCommonPrefixInRowPart(Cell left, Cell right, int rowCommonPrefix) {<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      return Bytes.findCommonPrefix(left.getRowArray(), right.getRowArray(), left.getRowLength()<a name="line.928"></a>
-<span class="sourceLineNo">929</span>          - rowCommonPrefix, right.getRowLength() - rowCommonPrefix, left.getRowOffset()<a name="line.929"></a>
-<span class="sourceLineNo">930</span>          + rowCommonPrefix, right.getRowOffset() + rowCommonPrefix);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    }<a name="line.931"></a>
-<span class="sourceLineNo">932</span><a name="line.932"></a>
-<span class="sourceLineNo">933</span>    private static int findCommonPrefixInFamilyPart(Cell left, Cell right, int familyCommonPrefix) {<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      return Bytes<a name="line.934"></a>
-<span class="sourceLineNo">935</span>          .findCommonPrefix(left.getFamilyArray(), right.getFamilyArray(), left.getFamilyLength()<a name="line.935"></a>
-<span class="sourceLineNo">936</span>              - familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix,<a name="line.936"></a>
-<span class="sourceLineNo">937</span>              left.getFamilyOffset() + familyCommonPrefix, right.getFamilyOffset()<a name="line.937"></a>
-<span class="sourceLineNo">938</span>                  + familyCommonPrefix);<a name="line.938"></a>
-<span class="sourceLineNo">939</span>    }<a name="line.939"></a>
-<span class="sourceLineNo">940</span><a name="line.940"></a>
-<span class="sourceLineNo">941</span>    private static int findCommonPrefixInQualifierPart(Cell left, Cell right,<a name="line.941"></a>
-<span class="sourceLineNo">942</span>        int qualifierCommonPrefix) {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      return Bytes.findCommonPrefix(left.getQualifierArray(), right.getQualifierArray(),<a name="line.943"></a>
-<span class="sourceLineNo">944</span>          left.getQualifierLength() - qualifierCommonPrefix, right.getQualifierLength()<a name="line.944"></a>
-<span class="sourceLineNo">945</span>              - qualifierCommonPrefix, left.getQualifierOffset() + qualifierCommonPrefix,<a name="line.945"></a>
-<span class="sourceLineNo">946</span>          right.getQualifierOffset() + qualifierCommonPrefix);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    private void moveToPrevious() {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!previous.isValid()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        throw new IllegalStateException(<a name="line.951"></a>
-<span class="sourceLineNo">952</span>            "Can move back only once and not in first key in the block.");<a name="line.952"></a>
-<span class="sourceLineNo">953</span>      }<a name="line.953"></a>
-<span class="sourceLineNo">954</span><a name="line.954"></a>
-<span class="sourceLineNo">955</span>      STATE tmp = previous;<a name="line.955"></a>
-<span class="sourceLineNo">956</span>      previous = current;<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      current = tmp;<a name="line.957"></a>
-<span class="sourceLineNo">958</span><a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // move after last key value<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      currentBuffer.position(current.nextKvOffset);<a name="line.960"></a>

<TRUNCATED>

[22/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html b/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html
index 8e94eb6..6e7cf22 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html
@@ -33,566 +33,565 @@
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.KeyValue.Type;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.ByteBufferUtils;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.39"></a>
-<span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>/**<a name="line.41"></a>
-<span class="sourceLineNo">042</span> * These methods have the same definition as any implementation of the EncodedSeeker.<a name="line.42"></a>
-<span class="sourceLineNo">043</span> *<a name="line.43"></a>
-<span class="sourceLineNo">044</span> * In the future, the EncodedSeeker could be modified to work with the Cell interface directly.  It<a name="line.44"></a>
-<span class="sourceLineNo">045</span> * currently returns a new KeyValue object each time getKeyValue is called.  This is not horrible,<a name="line.45"></a>
-<span class="sourceLineNo">046</span> * but in order to create a new KeyValue object, we must first allocate a new byte[] and copy in<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * the data from the PrefixTreeCell.  It is somewhat heavyweight right now.<a name="line.47"></a>
-<span class="sourceLineNo">048</span> */<a name="line.48"></a>
-<span class="sourceLineNo">049</span>@InterfaceAudience.Private<a name="line.49"></a>
-<span class="sourceLineNo">050</span>public class PrefixTreeSeeker implements EncodedSeeker {<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>  protected ByteBuffer block;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>  protected boolean includeMvccVersion;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  protected PrefixTreeArraySearcher ptSearcher;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  public PrefixTreeSeeker(boolean includeMvccVersion) {<a name="line.56"></a>
-<span class="sourceLineNo">057</span>    this.includeMvccVersion = includeMvccVersion;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  }<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  @Override<a name="line.60"></a>
-<span class="sourceLineNo">061</span>  public void setCurrentBuffer(ByteBuff fullBlockBuffer) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>    ptSearcher = DecoderFactory.checkOut(fullBlockBuffer, includeMvccVersion);<a name="line.62"></a>
-<span class="sourceLineNo">063</span>    rewind();<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  }<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>  /**<a name="line.66"></a>
-<span class="sourceLineNo">067</span>   * &lt;p&gt;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>   * Currently unused.<a name="line.68"></a>
-<span class="sourceLineNo">069</span>   * &lt;/p&gt;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>   * TODO performance leak. should reuse the searchers. hbase does not currently have a hook where<a name="line.70"></a>
-<span class="sourceLineNo">071</span>   * this can be called<a name="line.71"></a>
-<span class="sourceLineNo">072</span>   */<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  public void releaseCurrentSearcher(){<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    DecoderFactory.checkIn(ptSearcher);<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  }<a name="line.75"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.util.ByteBufferUtils;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.38"></a>
+<span class="sourceLineNo">039</span><a name="line.39"></a>
+<span class="sourceLineNo">040</span>/**<a name="line.40"></a>
+<span class="sourceLineNo">041</span> * These methods have the same definition as any implementation of the EncodedSeeker.<a name="line.41"></a>
+<span class="sourceLineNo">042</span> *<a name="line.42"></a>
+<span class="sourceLineNo">043</span> * In the future, the EncodedSeeker could be modified to work with the Cell interface directly.  It<a name="line.43"></a>
+<span class="sourceLineNo">044</span> * currently returns a new KeyValue object each time getKeyValue is called.  This is not horrible,<a name="line.44"></a>
+<span class="sourceLineNo">045</span> * but in order to create a new KeyValue object, we must first allocate a new byte[] and copy in<a name="line.45"></a>
+<span class="sourceLineNo">046</span> * the data from the PrefixTreeCell.  It is somewhat heavyweight right now.<a name="line.46"></a>
+<span class="sourceLineNo">047</span> */<a name="line.47"></a>
+<span class="sourceLineNo">048</span>@InterfaceAudience.Private<a name="line.48"></a>
+<span class="sourceLineNo">049</span>public class PrefixTreeSeeker implements EncodedSeeker {<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>  protected ByteBuffer block;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  protected boolean includeMvccVersion;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>  protected PrefixTreeArraySearcher ptSearcher;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>  public PrefixTreeSeeker(boolean includeMvccVersion) {<a name="line.55"></a>
+<span class="sourceLineNo">056</span>    this.includeMvccVersion = includeMvccVersion;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>  }<a name="line.57"></a>
+<span class="sourceLineNo">058</span><a name="line.58"></a>
+<span class="sourceLineNo">059</span>  @Override<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  public void setCurrentBuffer(ByteBuff fullBlockBuffer) {<a name="line.60"></a>
+<span class="sourceLineNo">061</span>    ptSearcher = DecoderFactory.checkOut(fullBlockBuffer, includeMvccVersion);<a name="line.61"></a>
+<span class="sourceLineNo">062</span>    rewind();<a name="line.62"></a>
+<span class="sourceLineNo">063</span>  }<a name="line.63"></a>
+<span class="sourceLineNo">064</span><a name="line.64"></a>
+<span class="sourceLineNo">065</span>  /**<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   * &lt;p&gt;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   * Currently unused.<a name="line.67"></a>
+<span class="sourceLineNo">068</span>   * &lt;/p&gt;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>   * TODO performance leak. should reuse the searchers. hbase does not currently have a hook where<a name="line.69"></a>
+<span class="sourceLineNo">070</span>   * this can be called<a name="line.70"></a>
+<span class="sourceLineNo">071</span>   */<a name="line.71"></a>
+<span class="sourceLineNo">072</span>  public void releaseCurrentSearcher(){<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    DecoderFactory.checkIn(ptSearcher);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  }<a name="line.74"></a>
+<span class="sourceLineNo">075</span><a name="line.75"></a>
 <span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  @Override<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  public Cell getKey() {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>    return ptSearcher.current();<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  }<a name="line.81"></a>
+<span class="sourceLineNo">077</span>  @Override<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  public Cell getKey() {<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    return ptSearcher.current();<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
+<span class="sourceLineNo">081</span><a name="line.81"></a>
 <span class="sourceLineNo">082</span><a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  @Override<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public ByteBuffer getValueShallowCopy() {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    return CellUtil.getValueBufferShallowCopy(ptSearcher.current());<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  }<a name="line.87"></a>
-<span class="sourceLineNo">088</span><a name="line.88"></a>
-<span class="sourceLineNo">089</span>  /**<a name="line.89"></a>
-<span class="sourceLineNo">090</span>   * currently must do deep copy into new array<a name="line.90"></a>
-<span class="sourceLineNo">091</span>   */<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  @Override<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public Cell getCell() {<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    // The PrefixTreecell is of type BytebufferedCell and the value part of the cell<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    // determines whether we are offheap cell or onheap cell.  All other parts of the cell-<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    // row, fam and col are all represented as onheap byte[]<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    ByteBufferedCell cell = (ByteBufferedCell)ptSearcher.current();<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    if (cell == null) {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      return null;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    }<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    // Use the ByteBuffered cell to see if the Cell is onheap or offheap<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    if (cell.getValueByteBuffer().hasArray()) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>      return new OnheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.103"></a>
-<span class="sourceLineNo">104</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.104"></a>
-<span class="sourceLineNo">105</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.105"></a>
-<span class="sourceLineNo">106</span>          cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),<a name="line.106"></a>
-<span class="sourceLineNo">107</span>          cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),<a name="line.107"></a>
-<span class="sourceLineNo">108</span>          cell.getSequenceId());<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    } else {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      return new OffheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.110"></a>
-<span class="sourceLineNo">111</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.111"></a>
-<span class="sourceLineNo">112</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.112"></a>
-<span class="sourceLineNo">113</span>          cell.getValueByteBuffer(), cell.getValuePosition(), cell.getValueLength(),<a name="line.113"></a>
-<span class="sourceLineNo">114</span>          cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(),<a name="line.114"></a>
-<span class="sourceLineNo">115</span>          cell.getTypeByte(), cell.getSequenceId());<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    }<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  }<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  /**<a name="line.119"></a>
-<span class="sourceLineNo">120</span>   * &lt;p&gt;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>   * Currently unused.<a name="line.121"></a>
-<span class="sourceLineNo">122</span>   * &lt;/p&gt;&lt;p&gt;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>   * A nice, lightweight reference, though the underlying cell is transient. This method may return<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   * the same reference to the backing PrefixTreeCell repeatedly, while other implementations may<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   * return a different reference for each Cell.<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * &lt;/p&gt;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   * The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to<a name="line.127"></a>
-<span class="sourceLineNo">128</span>   * use this method instead of the getKeyValue() methods above.<a name="line.128"></a>
-<span class="sourceLineNo">129</span>   */<a name="line.129"></a>
-<span class="sourceLineNo">130</span>  public Cell get() {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    return ptSearcher.current();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  }<a name="line.132"></a>
-<span class="sourceLineNo">133</span><a name="line.133"></a>
-<span class="sourceLineNo">134</span>  @Override<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  public void rewind() {<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    ptSearcher.positionAtFirstCell();<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  @Override<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  public boolean next() {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    return ptSearcher.advance();<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
-<span class="sourceLineNo">143</span><a name="line.143"></a>
-<span class="sourceLineNo">144</span>  public boolean advance() {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    return ptSearcher.advance();<a name="line.145"></a>
-<span class="sourceLineNo">146</span>  }<a name="line.146"></a>
+<span class="sourceLineNo">083</span>  @Override<a name="line.83"></a>
+<span class="sourceLineNo">084</span>  public ByteBuffer getValueShallowCopy() {<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    return CellUtil.getValueBufferShallowCopy(ptSearcher.current());<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  }<a name="line.86"></a>
+<span class="sourceLineNo">087</span><a name="line.87"></a>
+<span class="sourceLineNo">088</span>  /**<a name="line.88"></a>
+<span class="sourceLineNo">089</span>   * currently must do deep copy into new array<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>  @Override<a name="line.91"></a>
+<span class="sourceLineNo">092</span>  public Cell getCell() {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    // The PrefixTreecell is of type BytebufferedCell and the value part of the cell<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    // determines whether we are offheap cell or onheap cell.  All other parts of the cell-<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    // row, fam and col are all represented as onheap byte[]<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    ByteBufferedCell cell = (ByteBufferedCell)ptSearcher.current();<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    if (cell == null) {<a name="line.97"></a>
+<span class="sourceLineNo">098</span>      return null;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    }<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    // Use the ByteBuffered cell to see if the Cell is onheap or offheap<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    if (cell.getValueByteBuffer().hasArray()) {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>      return new OnheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.102"></a>
+<span class="sourceLineNo">103</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.103"></a>
+<span class="sourceLineNo">104</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.104"></a>
+<span class="sourceLineNo">105</span>          cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),<a name="line.105"></a>
+<span class="sourceLineNo">106</span>          cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),<a name="line.106"></a>
+<span class="sourceLineNo">107</span>          cell.getSequenceId());<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    } else {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      return new OffheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.109"></a>
+<span class="sourceLineNo">110</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.110"></a>
+<span class="sourceLineNo">111</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.111"></a>
+<span class="sourceLineNo">112</span>          cell.getValueByteBuffer(), cell.getValuePosition(), cell.getValueLength(),<a name="line.112"></a>
+<span class="sourceLineNo">113</span>          cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(),<a name="line.113"></a>
+<span class="sourceLineNo">114</span>          cell.getTypeByte(), cell.getSequenceId());<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * &lt;p&gt;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * Currently unused.<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   * &lt;/p&gt;&lt;p&gt;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * A nice, lightweight reference, though the underlying cell is transient. This method may return<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * the same reference to the backing PrefixTreeCell repeatedly, while other implementations may<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   * return a different reference for each Cell.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>   * &lt;/p&gt;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>   * The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to<a name="line.126"></a>
+<span class="sourceLineNo">127</span>   * use this method instead of the getKeyValue() methods above.<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   */<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  public Cell get() {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    return ptSearcher.current();<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  }<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>  @Override<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public void rewind() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    ptSearcher.positionAtFirstCell();<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  }<a name="line.136"></a>
+<span class="sourceLineNo">137</span><a name="line.137"></a>
+<span class="sourceLineNo">138</span>  @Override<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  public boolean next() {<a name="line.139"></a>
+<span class="sourceLineNo">140</span>    return ptSearcher.advance();<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  }<a name="line.141"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>  public boolean advance() {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    return ptSearcher.advance();<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
+<span class="sourceLineNo">146</span><a name="line.146"></a>
 <span class="sourceLineNo">147</span><a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  private static final boolean USE_POSITION_BEFORE = false;<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>  /*<a name="line.151"></a>
-<span class="sourceLineNo">152</span>   * Support both of these options since the underlying PrefixTree supports<a name="line.152"></a>
-<span class="sourceLineNo">153</span>   * both. Possibly expand the EncodedSeeker to utilize them both.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>   */<a name="line.154"></a>
-<span class="sourceLineNo">155</span><a name="line.155"></a>
-<span class="sourceLineNo">156</span>  protected int seekToOrBeforeUsingPositionAtOrBefore(Cell kv, boolean seekBefore) {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    // this does a deep copy of the key byte[] because the CellSearcher<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    // interface wants a Cell<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    CellScannerPosition position = ptSearcher.seekForwardToOrBefore(kv);<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>    if (CellScannerPosition.AT == position) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      if (seekBefore) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        ptSearcher.previous();<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        return 1;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      }<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      return 0;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    }<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    return 1;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  }<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  protected int seekToOrBeforeUsingPositionAtOrAfter(Cell kv, boolean seekBefore) {<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    // should probably switch this to use the seekForwardToOrBefore method<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    CellScannerPosition position = ptSearcher.seekForwardToOrAfter(kv);<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>    if (CellScannerPosition.AT == position) {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      if (seekBefore) {<a name="line.177"></a>
-<span class="sourceLineNo">178</span>        ptSearcher.previous();<a name="line.178"></a>
-<span class="sourceLineNo">179</span>        return 1;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      }<a name="line.180"></a>
-<span class="sourceLineNo">181</span>      return 0;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>    if (CellScannerPosition.AFTER == position) {<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      if (!ptSearcher.isBeforeFirst()) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>        ptSearcher.previous();<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      }<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      return 1;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    }<a name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>    if (position == CellScannerPosition.AFTER_LAST) {<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      if (seekBefore) {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        ptSearcher.previous();<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      }<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      return 1;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    }<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>    throw new RuntimeException("unexpected CellScannerPosition:" + position);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>  }<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>  @Override<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  public int seekToKeyInBlock(Cell key, boolean forceBeforeOnExactMatch) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    if (USE_POSITION_BEFORE) {<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      return seekToOrBeforeUsingPositionAtOrBefore(key, forceBeforeOnExactMatch);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    } else {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      return seekToOrBeforeUsingPositionAtOrAfter(key, forceBeforeOnExactMatch);<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    }<a name="line.208"></a>
-<span class="sourceLineNo">209</span>  }<a name="line.209"></a>
-<span class="sourceLineNo">210</span><a name="line.210"></a>
-<span class="sourceLineNo">211</span>  @Override<a name="line.211"></a>
-<span class="sourceLineNo">212</span>  public int compareKey(CellComparator comparator, Cell key) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    return comparator.compare(key,<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        ptSearcher.current());<a name="line.214"></a>
-<span class="sourceLineNo">215</span>  }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  /**<a name="line.217"></a>
-<span class="sourceLineNo">218</span>   * Cloned version of the PrefixTreeCell where except the value part, the rest<a name="line.218"></a>
-<span class="sourceLineNo">219</span>   * of the key part is deep copied<a name="line.219"></a>
-<span class="sourceLineNo">220</span>   *<a name="line.220"></a>
-<span class="sourceLineNo">221</span>   */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static class OnheapPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.224"></a>
-<span class="sourceLineNo">225</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY));<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    private byte[] row;<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    private short rowLength;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    private byte[] fam;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    private byte famLength;<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    private byte[] qual;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    private int qualLength;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    private byte[] val;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    private int valOffset;<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    private int valLength;<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    private byte[] tag;<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    private int tagsLength;<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    private long ts;<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    private long seqId;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    private byte type;<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>    public OnheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.241"></a>
-<span class="sourceLineNo">242</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, byte[] val,<a name="line.242"></a>
-<span class="sourceLineNo">243</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        long seqId) {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      this.row = new byte[rowLength];<a name="line.245"></a>
-<span class="sourceLineNo">246</span>      System.arraycopy(row, rowOffset, this.row, 0, rowLength);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      this.rowLength = rowLength;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      this.fam = new byte[famLength];<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      System.arraycopy(fam, famOffset, this.fam, 0, famLength);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      this.famLength = famLength;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>      this.qual = new byte[qualLength];<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      System.arraycopy(qual, qualOffset, this.qual, 0, qualLength);<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      this.qualLength = qualLength;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      this.tag = new byte[tagLength];<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      System.arraycopy(tag, tagOffset, this.tag, 0, tagLength);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      this.tagsLength = tagLength;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      this.val = val;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      this.valLength = valLength;<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      this.valOffset = valOffset;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      this.ts = ts;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      this.seqId = seqId;<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      this.type = type;<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    }<a name="line.263"></a>
-<span class="sourceLineNo">264</span><a name="line.264"></a>
-<span class="sourceLineNo">265</span>    @Override<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    public void setSequenceId(long seqId) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      this.seqId = seqId;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    @Override<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    public byte[] getRowArray() {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      return this.row;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    }<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>    @Override<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    public int getRowOffset() {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      return 0;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    }<a name="line.278"></a>
-<span class="sourceLineNo">279</span><a name="line.279"></a>
-<span class="sourceLineNo">280</span>    @Override<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    public short getRowLength() {<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      return this.rowLength;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span><a name="line.284"></a>
-<span class="sourceLineNo">285</span>    @Override<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    public byte[] getFamilyArray() {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>      return this.fam;<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>    @Override<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    public int getFamilyOffset() {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return 0;<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
-<span class="sourceLineNo">295</span>    @Override<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    public byte getFamilyLength() {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      return this.famLength;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    }<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    @Override<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    public byte[] getQualifierArray() {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      return this.qual;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    }<a name="line.303"></a>
-<span class="sourceLineNo">304</span><a name="line.304"></a>
-<span class="sourceLineNo">305</span>    @Override<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    public int getQualifierOffset() {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      return 0;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    }<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>    @Override<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    public int getQualifierLength() {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      return this.qualLength;<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>    @Override<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    public long getTimestamp() {<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      return ts;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    }<a name="line.318"></a>
-<span class="sourceLineNo">319</span><a name="line.319"></a>
-<span class="sourceLineNo">320</span>    @Override<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    public byte getTypeByte() {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      return type;<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    }<a name="line.323"></a>
-<span class="sourceLineNo">324</span><a name="line.324"></a>
-<span class="sourceLineNo">325</span>    @Override<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    public long getSequenceId() {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      return seqId;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    }<a name="line.328"></a>
-<span class="sourceLineNo">329</span><a name="line.329"></a>
-<span class="sourceLineNo">330</span>    @Override<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    public byte[] getValueArray() {<a name="line.331"></a>
-<span class="sourceLineNo">332</span>      return val;<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    }<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>    @Override<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    public int getValueOffset() {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>      return this.valOffset;<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    }<a name="line.338"></a>
-<span class="sourceLineNo">339</span><a name="line.339"></a>
-<span class="sourceLineNo">340</span>    @Override<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    public int getValueLength() {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>      return this.valLength;<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    }<a name="line.343"></a>
-<span class="sourceLineNo">344</span><a name="line.344"></a>
-<span class="sourceLineNo">345</span>    @Override<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    public byte[] getTagsArray() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      return this.tag;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    }<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>    @Override<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    public int getTagsOffset() {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return 0;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span><a name="line.354"></a>
-<span class="sourceLineNo">355</span>    @Override<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    public int getTagsLength() {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      return this.tagsLength;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>    @Override<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    public String toString() {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),<a name="line.364"></a>
-<span class="sourceLineNo">365</span>          getQualifierLength());<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      String timestamp = String.valueOf((getTimestamp()));<a name="line.366"></a>
-<span class="sourceLineNo">367</span>      return row + "/" + family + (family != null &amp;&amp; family.length() &gt; 0 ? ":" : "") + qualifier<a name="line.367"></a>
-<span class="sourceLineNo">368</span>          + "/" + timestamp + "/" + Type.codeToType(type);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>    @Override<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    public long heapSize() {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    }<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  private static class OffheapPrefixTreeCell extends ByteBufferedCell implements Cell,<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      SettableSequenceId, HeapSize {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.379"></a>
-<span class="sourceLineNo">380</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.380"></a>
-<span class="sourceLineNo">381</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.BYTE_BUFFER));<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    private ByteBuffer rowBuff;<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    private short rowLength;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>    private ByteBuffer famBuff;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    private byte famLength;<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    private ByteBuffer qualBuff;<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    private int qualLength;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    private ByteBuffer val;<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    private int valOffset;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private int valLength;<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    private ByteBuffer tagBuff;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    private int tagsLength;<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    private long ts;<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private long seqId;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    private byte type;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    public OffheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, ByteBuffer val,<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.398"></a>
-<span class="sourceLineNo">399</span>        long seqId) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>      byte[] tmpRow = new byte[rowLength];<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      System.arraycopy(row, rowOffset, tmpRow, 0, rowLength);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      this.rowBuff = ByteBuffer.wrap(tmpRow);<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      this.rowLength = rowLength;<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      byte[] tmpFam = new byte[famLength];<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      System.arraycopy(fam, famOffset, tmpFam, 0, famLength);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      this.famBuff = ByteBuffer.wrap(tmpFam);<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      this.famLength = famLength;<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      byte[] tmpQual = new byte[qualLength];<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      System.arraycopy(qual, qualOffset, tmpQual, 0, qualLength);<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      this.qualBuff = ByteBuffer.wrap(tmpQual);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      this.qualLength = qualLength;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>      byte[] tmpTag = new byte[tagLength];<a name="line.412"></a>
-<span class="sourceLineNo">413</span>      System.arraycopy(tag, tagOffset, tmpTag, 0, tagLength);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      this.tagBuff = ByteBuffer.wrap(tmpTag);<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      this.tagsLength = tagLength;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>      this.val = val;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      this.valLength = valLength;<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      this.valOffset = valOffset;<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      this.ts = ts;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>      this.seqId = seqId;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      this.type = type;<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    <a name="line.423"></a>
-<span class="sourceLineNo">424</span>    @Override<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    public void setSequenceId(long seqId) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>      this.seqId = seqId;<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    }<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>    @Override<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    public byte[] getRowArray() {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      return this.rowBuff.array();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    public int getRowOffset() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      return getRowPosition();<a name="line.436"></a>
-<span class="sourceLineNo">437</span>    }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>    @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    public short getRowLength() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>      return this.rowLength;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>    @Override<a name="line.444"></a>
-<span class="sourceLineNo">445</span>    public byte[] getFamilyArray() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      return this.famBuff.array();<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    }<a name="line.447"></a>
-<span class="sourceLineNo">448</span><a name="line.448"></a>
-<span class="sourceLineNo">449</span>    @Override<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    public int getFamilyOffset() {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      return getFamilyPosition();<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    }<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>    @Override<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    public byte getFamilyLength() {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>      return this.famLength;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    @Override<a name="line.459"></a>
-<span class="sourceLineNo">460</span>    public byte[] getQualifierArray() {<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      return this.qualBuff.array();<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    }<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>    @Override<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    public int getQualifierOffset() {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      return getQualifierPosition();<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
-<span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>    @Override<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    public int getQualifierLength() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      return this.qualLength;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>    @Override<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    public long getTimestamp() {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>      return ts;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    }<a name="line.477"></a>
-<span class="sourceLineNo">478</span><a name="line.478"></a>
-<span class="sourceLineNo">479</span>    @Override<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    public byte getTypeByte() {<a name="line.480"></a>
-<span class="sourceLineNo">481</span>      return type;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    }<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>    @Override<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    public long getSequenceId() {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      return seqId;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>    @Override<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    public byte[] getValueArray() {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      byte[] tmpVal = new byte[valLength];<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      ByteBufferUtils.copyFromBufferToArray(tmpVal, val, valOffset, 0, valLength);<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      return tmpVal;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    }<a name="line.494"></a>
-<span class="sourceLineNo">495</span><a name="line.495"></a>
-<span class="sourceLineNo">496</span>    @Override<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    public int getValueOffset() {<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      return 0;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    }<a name="line.499"></a>
-<span class="sourceLineNo">500</span><a name="line.500"></a>
-<span class="sourceLineNo">501</span>    @Override<a name="line.501"></a>
-<span class="sourceLineNo">502</span>    public int getValueLength() {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      return this.valLength;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    }<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>    @Override<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    public byte[] getTagsArray() {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      return this.tagBuff.array();<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    @Override<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    public int getTagsOffset() {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return getTagsPosition();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    @Override<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    public int getTagsLength() {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      return this.tagsLength;<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>    <a name="line.520"></a>
-<span class="sourceLineNo">521</span>    @Override<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    public ByteBuffer getRowByteBuffer() {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      return this.rowBuff;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    <a name="line.525"></a>
-<span class="sourceLineNo">526</span>    @Override<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    public int getRowPosition() {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return 0;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    <a name="line.530"></a>
-<span class="sourceLineNo">531</span>    @Override<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    public ByteBuffer getFamilyByteBuffer() {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      return this.famBuff;<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    }<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    <a name="line.535"></a>
-<span class="sourceLineNo">536</span>    @Override<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    public int getFamilyPosition() {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      return 0;<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    }<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    <a name="line.540"></a>
-<span class="sourceLineNo">541</span>    @Override<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    public ByteBuffer getQualifierByteBuffer() {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      return this.qualBuff;<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    }<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    @Override<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    public int getQualifierPosition() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      return 0;<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    @Override<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    public ByteBuffer getTagsByteBuffer() {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      return this.tagBuff;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    }<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    @Override<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    public int getTagsPosition() {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      return 0;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    @Override<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    public ByteBuffer getValueByteBuffer() {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      return this.val;<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>    @Override<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    public int getValuePosition() {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      return this.valOffset;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    @Override<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    public long heapSize() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>    @Override<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    public String toString() {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),<a name="line.580"></a>
-<span class="sourceLineNo">581</span>          getQualifierLength());<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      String timestamp = String.valueOf((getTimestamp()));<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      return row + "/" + family + (family != null &amp;&amp; family.length() &gt; 0 ? ":" : "") + qualifier<a name="line.583"></a>
-<span class="sourceLineNo">584</span>          + "/" + timestamp + "/" + Type.codeToType(type);<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    }<a name="line.585"></a>
-<span class="sourceLineNo">586</span>  }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>}<a name="line.587"></a>
+<span class="sourceLineNo">148</span>  private static final boolean USE_POSITION_BEFORE = false;<a name="line.148"></a>
+<span class="sourceLineNo">149</span><a name="line.149"></a>
+<span class="sourceLineNo">150</span>  /*<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   * Support both of these options since the underlying PrefixTree supports<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   * both. Possibly expand the EncodedSeeker to utilize them both.<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   */<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  protected int seekToOrBeforeUsingPositionAtOrBefore(Cell kv, boolean seekBefore) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>    // this does a deep copy of the key byte[] because the CellSearcher<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    // interface wants a Cell<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    CellScannerPosition position = ptSearcher.seekForwardToOrBefore(kv);<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>    if (CellScannerPosition.AT == position) {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      if (seekBefore) {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>        ptSearcher.previous();<a name="line.162"></a>
+<span class="sourceLineNo">163</span>        return 1;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      }<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      return 0;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
+<span class="sourceLineNo">167</span><a name="line.167"></a>
+<span class="sourceLineNo">168</span>    return 1;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  }<a name="line.169"></a>
+<span class="sourceLineNo">170</span><a name="line.170"></a>
+<span class="sourceLineNo">171</span>  protected int seekToOrBeforeUsingPositionAtOrAfter(Cell kv, boolean seekBefore) {<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    // should probably switch this to use the seekForwardToOrBefore method<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    CellScannerPosition position = ptSearcher.seekForwardToOrAfter(kv);<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>    if (CellScannerPosition.AT == position) {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      if (seekBefore) {<a name="line.176"></a>
+<span class="sourceLineNo">177</span>        ptSearcher.previous();<a name="line.177"></a>
+<span class="sourceLineNo">178</span>        return 1;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      }<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      return 0;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
+<span class="sourceLineNo">183</span><a name="line.183"></a>
+<span class="sourceLineNo">184</span>    if (CellScannerPosition.AFTER == position) {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      if (!ptSearcher.isBeforeFirst()) {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>        ptSearcher.previous();<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      }<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      return 1;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    }<a name="line.189"></a>
+<span class="sourceLineNo">190</span><a name="line.190"></a>
+<span class="sourceLineNo">191</span>    if (position == CellScannerPosition.AFTER_LAST) {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      if (seekBefore) {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>        ptSearcher.previous();<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      }<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      return 1;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>    throw new RuntimeException("unexpected CellScannerPosition:" + position);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  }<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>  @Override<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  public int seekToKeyInBlock(Cell key, boolean forceBeforeOnExactMatch) {<a name="line.202"></a>
+<span class="sourceLineNo">203</span>    if (USE_POSITION_BEFORE) {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      return seekToOrBeforeUsingPositionAtOrBefore(key, forceBeforeOnExactMatch);<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    } else {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      return seekToOrBeforeUsingPositionAtOrAfter(key, forceBeforeOnExactMatch);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>  }<a name="line.208"></a>
+<span class="sourceLineNo">209</span><a name="line.209"></a>
+<span class="sourceLineNo">210</span>  @Override<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  public int compareKey(CellComparator comparator, Cell key) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>    return comparator.compare(key,<a name="line.212"></a>
+<span class="sourceLineNo">213</span>        ptSearcher.current());<a name="line.213"></a>
+<span class="sourceLineNo">214</span>  }<a name="line.214"></a>
+<span class="sourceLineNo">215</span><a name="line.215"></a>
+<span class="sourceLineNo">216</span>  /**<a name="line.216"></a>
+<span class="sourceLineNo">217</span>   * Cloned version of the PrefixTreeCell where except the value part, the rest<a name="line.217"></a>
+<span class="sourceLineNo">218</span>   * of the key part is deep copied<a name="line.218"></a>
+<span class="sourceLineNo">219</span>   *<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   */<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static class OnheapPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    private byte[] row;<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    private short rowLength;<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    private byte[] fam;<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    private byte famLength;<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    private byte[] qual;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    private int qualLength;<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    private byte[] val;<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    private int valOffset;<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    private int valLength;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    private byte[] tag;<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    private int tagsLength;<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    private long ts;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    private long seqId;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    private byte type;<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>    public OnheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.240"></a>
+<span class="sourceLineNo">241</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, byte[] val,<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.242"></a>
+<span class="sourceLineNo">243</span>        long seqId) {<a name="line.243"></a>
+<span class="sourceLineNo">244</span>      this.row = new byte[rowLength];<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      System.arraycopy(row, rowOffset, this.row, 0, rowLength);<a name="line.245"></a>
+<span class="sourceLineNo">246</span>      this.rowLength = rowLength;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>      this.fam = new byte[famLength];<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      System.arraycopy(fam, famOffset, this.fam, 0, famLength);<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      this.famLength = famLength;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      this.qual = new byte[qualLength];<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      System.arraycopy(qual, qualOffset, this.qual, 0, qualLength);<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      this.qualLength = qualLength;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      this.tag = new byte[tagLength];<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      System.arraycopy(tag, tagOffset, this.tag, 0, tagLength);<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      this.tagsLength = tagLength;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      this.val = val;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      this.valLength = valLength;<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      this.valOffset = valOffset;<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      this.ts = ts;<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      this.seqId = seqId;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      this.type = type;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>    @Override<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    public void setSequenceId(long seqId) {<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      this.seqId = seqId;<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    }<a name="line.267"></a>
+<span class="sourceLineNo">268</span><a name="line.268"></a>
+<span class="sourceLineNo">269</span>    @Override<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    public byte[] getRowArray() {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>      return this.row;<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    }<a name="line.272"></a>
+<span class="sourceLineNo">273</span><a name="line.273"></a>
+<span class="sourceLineNo">274</span>    @Override<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    public int getRowOffset() {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      return 0;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    }<a name="line.277"></a>
+<span class="sourceLineNo">278</span><a name="line.278"></a>
+<span class="sourceLineNo">279</span>    @Override<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    public short getRowLength() {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      return this.rowLength;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    public byte[] getFamilyArray() {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      return this.fam;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
+<span class="sourceLineNo">288</span><a name="line.288"></a>
+<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    public int getFamilyOffset() {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      return 0;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>    @Override<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    public byte getFamilyLength() {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return this.famLength;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">299</span>    @Override<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    public byte[] getQualifierArray() {<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      return this.qual;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>    @Override<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    public int getQualifierOffset() {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      return 0;<a name="line.306"></a>
+<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span>    @Override<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    public int getQualifierLength() {<a name="line.310"></a>
+<span class="sourceLineNo">311</span>      return this.qualLength;<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
+<span class="sourceLineNo">313</span><a name="line.313"></a>
+<span class="sourceLineNo">314</span>    @Override<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    public long getTimestamp() {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>      return ts;<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>    @Override<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    public byte getTypeByte() {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      return type;<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>    @Override<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    public long getSequenceId() {<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      return seqId;<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    @Override<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    public byte[] getValueArray() {<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      return val;<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    }<a name="line.332"></a>
+<span class="sourceLineNo">333</span><a name="line.333"></a>
+<span class="sourceLineNo">334</span>    @Override<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    public int getValueOffset() {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>      return this.valOffset;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    }<a name="line.337"></a>
+<span class="sourceLineNo">338</span><a name="line.338"></a>
+<span class="sourceLineNo">339</span>    @Override<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    public int getValueLength() {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      return this.valLength;<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    }<a name="line.342"></a>
+<span class="sourceLineNo">343</span><a name="line.343"></a>
+<span class="sourceLineNo">344</span>    @Override<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    public byte[] getTagsArray() {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>      return this.tag;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    }<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>    @Override<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    public int getTagsOffset() {<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      return 0;<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    }<a name="line.352"></a>
+<span class="sourceLineNo">353</span><a name="line.353"></a>
+<span class="sourceLineNo">354</span>    @Override<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    public int getTagsLength() {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      return this.tagsLength;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span><a name="line.358"></a>
+<span class="sourceLineNo">359</span>    @Override<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    public String toString() {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());<a name="line.361"></a>
+<span class="sourceLineNo">362</span>      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),<a name="line.363"></a>
+<span class="sourceLineNo">364</span>          getQualifierLength());<a name="line.364"></a>
+<span class="sourceLineNo">365</span>      String timestamp = String.valueOf((getTimestamp()));<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      return row + "/" + family + (family != null &amp;&amp; family.length() &gt; 0 ? ":" : "") + qualifier<a name="line.366"></a>
+<span class="sourceLineNo">367</span>          + "/" + timestamp + "/" + Type.codeToType(type);<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    }<a name="line.368"></a>
+<span class="sourceLineNo">369</span><a name="line.369"></a>
+<span class="sourceLineNo">370</span>    @Override<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    public long heapSize() {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  }<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  private static class OffheapPrefixTreeCell extends ByteBufferedCell implements Cell,<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      SettableSequenceId, HeapSize {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.BYTE_BUFFER));<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    private ByteBuffer rowBuff;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    private short rowLength;<a name="line.382"></a>
+<span class="sourceLineNo">383</span>    private ByteBuffer famBuff;<a name="line.383"></a>
+<span class="sourceLineNo">384</span>    private byte famLength;<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    private ByteBuffer qualBuff;<a name="line.385"></a>
+<span class="sourceLineNo">386</span>    private int qualLength;<a name="line.386"></a>
+<span class="sourceLineNo">387</span>    private ByteBuffer val;<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    private int valOffset;<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    private int valLength;<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    private ByteBuffer tagBuff;<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    private int tagsLength;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>    private long ts;<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    private long seqId;<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    private byte type;<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    public OffheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.395"></a>
+<span class="sourceLineNo">396</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, ByteBuffer val,<a name="line.396"></a>
+<span class="sourceLineNo">397</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        long seqId) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      byte[] tmpRow = new byte[rowLength];<a name="line.399"></a>
+<span class="sourceLineNo">400</span>      System.arraycopy(row, rowOffset, tmpRow, 0, rowLength);<a name="line.400"></a>
+<span class="sourceLineNo">401</span>      this.rowBuff = ByteBuffer.wrap(tmpRow);<a name="line.401"></a>
+<span class="sourceLineNo">402</span>      this.rowLength = rowLength;<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      byte[] tmpFam = new byte[famLength];<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      System.arraycopy(fam, famOffset, tmpFam, 0, famLength);<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      this.famBuff = ByteBuffer.wrap(tmpFam);<a name="line.405"></a>
+<span class="sourceLineNo">406</span>      this.famLength = famLength;<a name="line.406"></a>
+<span class="sourceLineNo">407</span>      byte[] tmpQual = new byte[qualLength];<a name="line.407"></a>
+<span class="sourceLineNo">408</span>      System.arraycopy(qual, qualOffset, tmpQual, 0, qualLength);<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      this.qualBuff = ByteBuffer.wrap(tmpQual);<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      this.qualLength = qualLength;<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      byte[] tmpTag = new byte[tagLength];<a name="line.411"></a>
+<span class="sourceLineNo">412</span>      System.arraycopy(tag, tagOffset, tmpTag, 0, tagLength);<a name="line.412"></a>
+<span class="sourceLineNo">413</span>      this.tagBuff = ByteBuffer.wrap(tmpTag);<a name="line.413"></a>
+<span class="sourceLineNo">414</span>      this.tagsLength = tagLength;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>      this.val = val;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>      this.valLength = valLength;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      this.valOffset = valOffset;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>      this.ts = ts;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      this.seqId = seqId;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>      this.type = type;<a name="line.420"></a>
+<span class="sourceLineNo">421</span>    }<a name="line.421"></a>
+<span class="sourceLineNo">422</span>    <a name="line.422"></a>
+<span class="sourceLineNo">423</span>    @Override<a name="line.423"></a>
+<span class="sourceLineNo">424</span>    public void setSequenceId(long seqId) {<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      this.seqId = seqId;<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span><a name="line.427"></a>
+<span class="sourceLineNo">428</span>    @Override<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    public byte[] getRowArray() {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      return this.rowBuff.array();<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>    @Override<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    public int getRowOffset() {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      return getRowPosition();<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    }<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>    @Override<a name="line.438"></a>
+<span class="sourceLineNo">439</span>    public short getRowLength() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>      return this.rowLength;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>    @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>    public byte[] getFamilyArray() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>      return this.famBuff.array();<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>    @Override<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    public int getFamilyOffset() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      return getFamilyPosition();<a name="line.450"></a>
+<span class="sourceLineNo">451</span>  

<TRUNCATED>
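The seek contract visible in the PrefixTreeSeeker code above: seekToKeyInBlock(key, forceBeforeOnExactMatch) answers 0 when the searcher is left exactly on the requested key (and an exact hit is acceptable) and 1 when it is left on the cell just before it, stepping back with ptSearcher.previous() where needed. The following is a minimal, self-contained sketch of that return convention only; SimpleSeeker, its String keys and the binarySearch positioning are illustrative stand-ins, not the PrefixTree searcher API.

    import java.util.Collections;
    import java.util.List;

    // Simplified stand-in for the seek contract shown above: return 0 when the
    // seeker lands exactly on the requested key (and an exact hit is allowed),
    // return 1 when it ends up positioned on the cell just before the key.
    public class SimpleSeeker {
      private final List<String> sortedKeys; // cells in the block, ascending order
      private int position = -1;             // index of the current cell

      public SimpleSeeker(List<String> sortedKeys) {
        this.sortedKeys = sortedKeys;
      }

      /** Mirrors the seekToKeyInBlock return convention from the diff above. */
      public int seekToKeyInBlock(String key, boolean forceBeforeOnExactMatch) {
        int idx = Collections.binarySearch(sortedKeys, key);
        if (idx >= 0) {                      // exact match
          if (forceBeforeOnExactMatch) {
            position = idx - 1;              // step back, like ptSearcher.previous()
            return 1;
          }
          position = idx;
          return 0;                          // AT the requested key
        }
        // not found: binarySearch returns -(insertionPoint) - 1
        position = -idx - 2;                 // greatest key strictly before 'key'
        return 1;                            // positioned before the requested key
      }

      public String current() {
        return position < 0 ? null : sortedKeys.get(position);
      }
    }

For example, with keys ["b", "d", "f"], seekToKeyInBlock("d", false) returns 0 and current() is "d", while seekToKeyInBlock("e", false) returns 1 and current() is again "d".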

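The OnheapPrefixTreeCell constructor above deep copies the row, family, qualifier and tag bytes out of the buffers owned by the decoder, but keeps the value array by reference along with its offset and length, and heapSize() charges a fixed per-object overhead plus those variable lengths. A reduced sketch of that copy strategy, with assumed class and field names and a hand-waved fixed overhead in place of the ClassSize arithmetic:

    // Reduced sketch of the copy strategy shown above (names and the fixed
    // overhead are assumptions, not the real HBase accounting): key components
    // are deep copied out of the decode buffer, the value is only referenced
    // together with its offset and length.
    public class CopiedCellSketch {
      private final byte[] row;        // deep copy
      private final byte[] value;      // shared reference into the original buffer
      private final int valOffset;
      private final int valLength;

      public CopiedCellSketch(byte[] row, int rowOffset, int rowLength,
                              byte[] value, int valOffset, int valLength) {
        this.row = new byte[rowLength];
        System.arraycopy(row, rowOffset, this.row, 0, rowLength);
        this.value = value;
        this.valOffset = valOffset;
        this.valLength = valLength;
      }

      /** Fixed per-object overhead plus the variable-length parts, as in heapSize(). */
      public long heapSize() {
        long assumedFixedOverhead = 16 + 2 * 8 + 2 * 4; // header + refs + ints (assumed)
        return assumedFixedOverhead + row.length + valLength;
      }
    }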
[46/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/book.html
----------------------------------------------------------------------
diff --git a/book.html b/book.html
index ae97541..e278127 100644
--- a/book.html
+++ b/book.html
@@ -33211,7 +33211,7 @@ The server will return cellblocks compressed using this same compressor as long
 <div id="footer">
 <div id="footer-text">
 Version 2.0.0-SNAPSHOT<br>
-Last updated 2016-03-03 14:47:28 UTC
+Last updated 2016-03-04 14:49:29 UTC
 </div>
 </div>
 </body>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/bulk-loads.html
----------------------------------------------------------------------
diff --git a/bulk-loads.html b/bulk-loads.html
index 4ff8f73..663911e 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20160303" />
+    <meta name="Date-Revision-yyyymmdd" content="20160304" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Bulk Loads in Apache HBase (TM)
@@ -305,7 +305,7 @@ under the License. -->
                         <a href="http://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2016-03-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2016-03-04</li>
             </p>
                 </div>
 


[17/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
index d8b6ca7..66dbcf3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
@@ -31,12 +31,12 @@
 <span class="sourceLineNo">023</span>import java.nio.ByteBuffer;<a name="line.23"></a>
 <span class="sourceLineNo">024</span><a name="line.24"></a>
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.ByteBufferedCell;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.Cell;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HConstants;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.31"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.Cell;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HConstants;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.31"></a>
 <span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.KeyValue.Type;<a name="line.32"></a>
 <span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.33"></a>
 <span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.34"></a>
@@ -60,1113 +60,1115 @@
 <span class="sourceLineNo">052</span> */<a name="line.52"></a>
 <span class="sourceLineNo">053</span>@InterfaceAudience.Private<a name="line.53"></a>
 <span class="sourceLineNo">054</span>abstract class BufferedDataBlockEncoder implements DataBlockEncoder {<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  private static int INITIAL_KEY_BUFFER_SIZE = 512;<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  @Override<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public ByteBuffer decodeKeyValues(DataInputStream source,<a name="line.59"></a>
-<span class="sourceLineNo">060</span>      HFileBlockDecodingContext blkDecodingCtx) throws IOException {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>      throw new IOException(this.getClass().getName() + " only accepts "<a name="line.62"></a>
-<span class="sourceLineNo">063</span>          + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    }<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>    HFileBlockDefaultDecodingContext decodingCtx =<a name="line.66"></a>
-<span class="sourceLineNo">067</span>        (HFileBlockDefaultDecodingContext) blkDecodingCtx;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    if (decodingCtx.getHFileContext().isIncludesTags()<a name="line.68"></a>
-<span class="sourceLineNo">069</span>        &amp;&amp; decodingCtx.getHFileContext().isCompressTags()) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      if (decodingCtx.getTagCompressionContext() != null) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        // It will be overhead to create the TagCompressionContext again and again for every block<a name="line.71"></a>
-<span class="sourceLineNo">072</span>        // decoding.<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        decodingCtx.getTagCompressionContext().clear();<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      } else {<a name="line.74"></a>
-<span class="sourceLineNo">075</span>        try {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>          TagCompressionContext tagCompressionContext = new TagCompressionContext(<a name="line.76"></a>
-<span class="sourceLineNo">077</span>              LRUDictionary.class, Byte.MAX_VALUE);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>          decodingCtx.setTagCompressionContext(tagCompressionContext);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>        } catch (Exception e) {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>          throw new IOException("Failed to initialize TagCompressionContext", e);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>        }<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      }<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    }<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    return internalDecodeKeyValues(source, 0, 0, decodingCtx);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  /********************* common prefixes *************************/<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  // Having this as static is fine but if META is having DBE then we should<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  // change this.<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  public static int compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    return Bytes.compareTo(left.getRowArray(), left.getRowOffset() + rowCommonPrefix,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>        left.getRowLength() - rowCommonPrefix, right.getRowArray(), right.getRowOffset()<a name="line.92"></a>
-<span class="sourceLineNo">093</span>            + rowCommonPrefix, right.getRowLength() - rowCommonPrefix);<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  public static int compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset() + familyCommonPrefix,<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        left.getFamilyLength() - familyCommonPrefix, right.getFamilyArray(),<a name="line.98"></a>
-<span class="sourceLineNo">099</span>        right.getFamilyOffset() + familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
-<span class="sourceLineNo">101</span><a name="line.101"></a>
-<span class="sourceLineNo">102</span>  public static int compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset() + qualCommonPrefix,<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        left.getQualifierLength() - qualCommonPrefix, right.getQualifierArray(),<a name="line.104"></a>
-<span class="sourceLineNo">105</span>        right.getQualifierOffset() + qualCommonPrefix, right.getQualifierLength()<a name="line.105"></a>
-<span class="sourceLineNo">106</span>            - qualCommonPrefix);<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  }<a name="line.107"></a>
-<span class="sourceLineNo">108</span><a name="line.108"></a>
-<span class="sourceLineNo">109</span>  protected static class SeekerState {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    protected ByteBuff currentBuffer;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    protected TagCompressionContext tagCompressionContext;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    protected int valueOffset = -1;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    protected int keyLength;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    protected int valueLength;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    protected int lastCommonPrefix;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    protected int tagsLength = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    protected int tagsOffset = -1;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    protected int tagsCompressedLength = 0;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    protected boolean uncompressTags = true;<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>    /** We need to store a copy of the key. */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    protected byte[] keyBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    protected byte[] tagsBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>    protected long memstoreTS;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    protected int nextKvOffset;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    protected KeyValue.KeyOnlyKeyValue currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    // many object creations.<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    private final ObjectIntPair&lt;ByteBuffer&gt; tmpPair;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    private final boolean includeTags;<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public SeekerState(ObjectIntPair&lt;ByteBuffer&gt; tmpPair, boolean includeTags) {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      this.tmpPair = tmpPair;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      this.includeTags = includeTags;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>    protected boolean isValid() {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      return valueOffset != -1;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    protected void invalidate() {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      valueOffset = -1;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>      tagsCompressedLength = 0;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      uncompressTags = true;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      currentBuffer = null;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    protected void ensureSpaceForKey() {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      if (keyLength &gt; keyBuffer.length) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        // rare case, but we need to handle arbitrary length of key<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        int newKeyBufferLength = Math.max(keyBuffer.length, 1) * 2;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>        while (keyLength &gt; newKeyBufferLength) {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>          newKeyBufferLength *= 2;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        }<a name="line.156"></a>
-<span class="sourceLineNo">157</span>        byte[] newKeyBuffer = new byte[newKeyBufferLength];<a name="line.157"></a>
-<span class="sourceLineNo">158</span>        System.arraycopy(keyBuffer, 0, newKeyBuffer, 0, keyBuffer.length);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        keyBuffer = newKeyBuffer;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      }<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    }<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>    protected void ensureSpaceForTags() {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      if (tagsLength &gt; tagsBuffer.length) {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        // rare case, but we need to handle arbitrary length of tags<a name="line.165"></a>
-<span class="sourceLineNo">166</span>        int newTagsBufferLength = Math.max(tagsBuffer.length, 1) * 2;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        while (tagsLength &gt; newTagsBufferLength) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>          newTagsBufferLength *= 2;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>        }<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        byte[] newTagsBuffer = new byte[newTagsBufferLength];<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        System.arraycopy(tagsBuffer, 0, newTagsBuffer, 0, tagsBuffer.length);<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        tagsBuffer = newTagsBuffer;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    }<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>    protected void setKey(byte[] keyBuffer, long memTS) {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      currentKey.setKey(keyBuffer, 0, keyLength);<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      memstoreTS = memTS;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    }<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>    /**<a name="line.181"></a>
-<span class="sourceLineNo">182</span>     * Copy the state from the next one into this instance (the previous state<a name="line.182"></a>
-<span class="sourceLineNo">183</span>     * placeholder). Used to save the previous state when we are advancing the<a name="line.183"></a>
-<span class="sourceLineNo">184</span>     * seeker to the next key/value.<a name="line.184"></a>
-<span class="sourceLineNo">185</span>     */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    protected void copyFromNext(SeekerState nextState) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      if (keyBuffer.length != nextState.keyBuffer.length) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>        keyBuffer = nextState.keyBuffer.clone();<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      } else if (!isValid()) {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        // Note: we can only call isValid before we override our state, so this<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        // comes before all the assignments at the end of this method.<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        System.arraycopy(nextState.keyBuffer, 0, keyBuffer, 0,<a name="line.192"></a>
-<span class="sourceLineNo">193</span>             nextState.keyLength);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      } else {<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        // don't copy the common prefix between this key and the previous one<a name="line.195"></a>
-<span class="sourceLineNo">196</span>        System.arraycopy(nextState.keyBuffer, nextState.lastCommonPrefix,<a name="line.196"></a>
-<span class="sourceLineNo">197</span>            keyBuffer, nextState.lastCommonPrefix, nextState.keyLength<a name="line.197"></a>
-<span class="sourceLineNo">198</span>                - nextState.lastCommonPrefix);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      }<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      currentKey = nextState.currentKey;<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>      valueOffset = nextState.valueOffset;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      keyLength = nextState.keyLength;<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      valueLength = nextState.valueLength;<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      lastCommonPrefix = nextState.lastCommonPrefix;<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      nextKvOffset = nextState.nextKvOffset;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      memstoreTS = nextState.memstoreTS;<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      currentBuffer = nextState.currentBuffer;<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      tagsOffset = nextState.tagsOffset;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      tagsLength = nextState.tagsLength;<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (nextState.tagCompressionContext != null) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        tagCompressionContext = nextState.tagCompressionContext;<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    public Cell toCell() {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>      // Buffer backing the value and tags part from the HFileBlock's buffer<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      // When tag compression in use, this will be only the value bytes area.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      ByteBuffer valAndTagsBuffer;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      int vOffset;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      int valAndTagsLength = this.valueLength;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>      int tagsLenSerializationSize = 0;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      if (this.includeTags &amp;&amp; this.tagCompressionContext == null) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        // Include the tags part also. This will be the tags bytes + 2 bytes of for storing tags<a name="line.224"></a>
-<span class="sourceLineNo">225</span>        // length<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        tagsLenSerializationSize = this.tagsOffset - (this.valueOffset + this.valueLength);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        valAndTagsLength += tagsLenSerializationSize + this.tagsLength;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      this.currentBuffer.asSubByteBuffer(this.valueOffset, valAndTagsLength, this.tmpPair);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      valAndTagsBuffer = this.tmpPair.getFirst();<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      vOffset = this.tmpPair.getSecond();// This is the offset to value part in the BB<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      if (valAndTagsBuffer.hasArray()) {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        return toOnheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      } else {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        return toOffheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>    private Cell toOnheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>        int tagsLenSerializationSize) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      byte[] tagsArray = HConstants.EMPTY_BYTE_ARRAY;<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      int tOffset = 0;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      if (this.includeTags) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        if (this.tagCompressionContext == null) {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          tagsArray = valAndTagsBuffer.array();<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          tOffset = valAndTagsBuffer.arrayOffset() + vOffset + this.valueLength<a name="line.246"></a>
-<span class="sourceLineNo">247</span>              + tagsLenSerializationSize;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        } else {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          tagsArray = Bytes.copy(tagsBuffer, 0, this.tagsLength);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          tOffset = 0;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      return new OnheapDecodedCell(Bytes.copy(keyBuffer, 0, this.keyLength),<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.255"></a>
-<span class="sourceLineNo">256</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer.array(),<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          valAndTagsBuffer.arrayOffset() + vOffset, this.valueLength, memstoreTS, tagsArray,<a name="line.257"></a>
-<span class="sourceLineNo">258</span>          tOffset, this.tagsLength);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>    private Cell toOffheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.261"></a>
-<span class="sourceLineNo">262</span>        int tagsLenSerializationSize) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      ByteBuffer tagsBuf =  HConstants.EMPTY_BYTE_BUFFER;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      int tOffset = 0;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      if (this.includeTags) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        if (this.tagCompressionContext == null) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          tagsBuf = valAndTagsBuffer;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>          tOffset = vOffset + this.valueLength + tagsLenSerializationSize;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        } else {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          tagsBuf = ByteBuffer.wrap(Bytes.copy(tagsBuffer, 0, this.tagsLength));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          tOffset = 0;<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return new OffheapDecodedCell(ByteBuffer.wrap(Bytes.copy(keyBuffer, 0, this.keyLength)),<a name="line.274"></a>
-<span class="sourceLineNo">275</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.276"></a>
-<span class="sourceLineNo">277</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer, vOffset,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>          this.valueLength, memstoreTS, tagsBuf, tOffset, this.tagsLength);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    }<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
-<span class="sourceLineNo">283</span>   * Copies only the key part of the keybuffer by doing a deep copy and passes the<a name="line.283"></a>
-<span class="sourceLineNo">284</span>   * seeker state members for taking a clone.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Note that the value byte[] part is still pointing to the currentBuffer and<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * represented by the valueOffset and valueLength<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  // We return this as a Cell to the upper layers of read flow and might try setting a new SeqId<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  // there. So this has to be an instance of SettableSequenceId.<a name="line.289"></a>
-<span class="sourceLineNo">290</span>  protected static class OnheapDecodedCell implements Cell, HeapSize, SettableSequenceId,<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      Streamable {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.293"></a>
-<span class="sourceLineNo">294</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.ARRAY));<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    private byte[] keyOnlyBuffer;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    private short rowLength;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    private int familyOffset;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    private byte familyLength;<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    private int qualifierOffset;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    private int qualifierLength;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    private long timestamp;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    private byte typeByte;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    private byte[] valueBuffer;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    private int valueOffset;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    private int valueLength;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    private byte[] tagsBuffer;<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    private int tagsOffset;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    private int tagsLength;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    private long seqId;<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>    protected OnheapDecodedCell(byte[] keyBuffer, short rowLength, int familyOffset,<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.312"></a>
-<span class="sourceLineNo">313</span>        byte[] valueBuffer, int valueOffset, int valueLen, long seqId, byte[] tagsBuffer,<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        int tagsOffset, int tagsLength) {<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      this.keyOnlyBuffer = keyBuffer;<a name="line.315"></a>
-<span class="sourceLineNo">316</span>      this.rowLength = rowLength;<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      this.familyOffset = familyOffset;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.familyLength = familyLength;<a name="line.318"></a>
-<span class="sourceLineNo">319</span>      this.qualifierOffset = qualOffset;<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.qualifierLength = qualLength;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>      this.timestamp = timeStamp;<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      this.typeByte = typeByte;<a name="line.322"></a>
-<span class="sourceLineNo">323</span>      this.valueBuffer = valueBuffer;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      this.valueOffset = valueOffset;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      this.valueLength = valueLen;<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      this.tagsBuffer = tagsBuffer;<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      this.tagsOffset = tagsOffset;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      this.tagsLength = tagsLength;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      setSequenceId(seqId);<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>    @Override<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    public byte[] getRowArray() {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>      return keyOnlyBuffer;<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    }<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>    @Override<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    public byte[] getFamilyArray() {<a name="line.338"></a>
-<span class="sourceLineNo">339</span>      return keyOnlyBuffer;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    }<a name="line.340"></a>
-<span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>    @Override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    public byte[] getQualifierArray() {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>      return keyOnlyBuffer;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
-<span class="sourceLineNo">346</span><a name="line.346"></a>
-<span class="sourceLineNo">347</span>    @Override<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    public int getRowOffset() {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      return Bytes.SIZEOF_SHORT;<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>    @Override<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    public short getRowLength() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      return rowLength;<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>    @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    public int getFamilyOffset() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      return familyOffset;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>    @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    public byte getFamilyLength() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      return familyLength;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>    @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    public int getQualifierOffset() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      return qualifierOffset;<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    }<a name="line.370"></a>
-<span class="sourceLineNo">371</span><a name="line.371"></a>
-<span class="sourceLineNo">372</span>    @Override<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    public int getQualifierLength() {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      return qualifierLength;<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>    @Override<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    public long getTimestamp() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      return timestamp;<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>    @Override<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    public byte getTypeByte() {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return typeByte;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>    @Override<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    public long getSequenceId() {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      return seqId;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>    @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    public byte[] getValueArray() {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      return this.valueBuffer;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>    @Override<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    public int getValueOffset() {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      return valueOffset;<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>    @Override<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    public int getValueLength() {<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      return valueLength;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    @Override<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    public byte[] getTagsArray() {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      return this.tagsBuffer;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    }<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    @Override<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    public int getTagsOffset() {<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      return this.tagsOffset;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    }<a name="line.415"></a>
-<span class="sourceLineNo">416</span><a name="line.416"></a>
-<span class="sourceLineNo">417</span>    @Override<a name="line.417"></a>
-<span class="sourceLineNo">418</span>    public int getTagsLength() {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      return tagsLength;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    @Override<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public String toString() {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          + getValueLength() + "/seqid=" + seqId;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    @Override<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    public void setSequenceId(long seqId) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      this.seqId = seqId;<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    @Override<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    public long heapSize() {<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>    @Override<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    public int write(OutputStream out) throws IOException {<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      return write(out, true);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    @Override<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.445"></a>
-<span class="sourceLineNo">446</span>          tagsLength, withTags);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      ByteBufferUtils.putInt(out, keyOnlyBuffer.length);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      // Write key<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      out.write(keyOnlyBuffer);<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      // Write value<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      out.write(this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      if (withTags) {<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        // 2 bytes tags length followed by tags bytes<a name="line.455"></a>
-<span class="sourceLineNo">456</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        out.write(this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>  }<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>  protected static class OffheapDecodedCell extends ByteBufferedCell implements HeapSize,<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      SettableSequenceId, Streamable {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.468"></a>
-<span class="sourceLineNo">469</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.BYTE_BUFFER));<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    private ByteBuffer keyBuffer;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    private short rowLength;<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    private int familyOffset;<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    private byte familyLength;<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    private int qualifierOffset;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    private int qualifierLength;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    private long timestamp;<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    private byte typeByte;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    private ByteBuffer valueBuffer;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    private int valueOffset;<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    private int valueLength;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private ByteBuffer tagsBuffer;<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    private int tagsOffset;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    private int tagsLength;<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    private long seqId;<a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>    protected OffheapDecodedCell(ByteBuffer keyBuffer, short rowLength, int familyOffset,<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.488"></a>
-<span class="sourceLineNo">489</span>        ByteBuffer valueBuffer, int valueOffset, int valueLen, long seqId, ByteBuffer tagsBuffer,<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        int tagsOffset, int tagsLength) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      // The keyBuffer is always onheap<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      assert keyBuffer.hasArray();<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      assert keyBuffer.arrayOffset() == 0;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      this.keyBuffer = keyBuffer;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      this.rowLength = rowLength;<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      this.familyOffset = familyOffset;<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      this.familyLength = familyLength;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      this.qualifierOffset = qualOffset;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      this.qualifierLength = qualLength;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      this.timestamp = timeStamp;<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      this.typeByte = typeByte;<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      this.valueBuffer = valueBuffer;<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      this.valueOffset = valueOffset;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      this.valueLength = valueLen;<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      this.tagsBuffer = tagsBuffer;<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      this.tagsOffset = tagsOffset;<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      this.tagsLength = tagsLength;<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      setSequenceId(seqId);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    @Override<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    public byte[] getRowArray() {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return this.keyBuffer.array();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    @Override<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    public int getRowOffset() {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      return getRowPosition();<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>    @Override<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    public short getRowLength() {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      return this.rowLength;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
-<span class="sourceLineNo">525</span><a name="line.525"></a>
-<span class="sourceLineNo">526</span>    @Override<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    public byte[] getFamilyArray() {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return this.keyBuffer.array();<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span><a name="line.530"></a>
-<span class="sourceLineNo">531</span>    @Override<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    public int getFamilyOffset() {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      return getFamilyPosition();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>    @Override<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    public byte getFamilyLength() {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      return this.familyLength;<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    }<a name="line.539"></a>
-<span class="sourceLineNo">540</span><a name="line.540"></a>
-<span class="sourceLineNo">541</span>    @Override<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    public byte[] getQualifierArray() {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      return this.keyBuffer.array();<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    }<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    @Override<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    public int getQualifierOffset() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      return getQualifierPosition();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    @Override<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    public int getQualifierLength() {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      return this.qualifierLength;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    }<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    @Override<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    public long getTimestamp() {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      return this.timestamp;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    @Override<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    public byte getTypeByte() {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      return this.typeByte;<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>    @Override<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    public long getSequenceId() {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      return this.seqId;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    @Override<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    public byte[] getValueArray() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      return CellUtil.cloneValue(this);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>    @Override<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    public int getValueOffset() {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      return 0;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
-<span class="sourceLineNo">580</span><a name="line.580"></a>
-<span class="sourceLineNo">581</span>    @Override<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    public int getValueLength() {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      return this.valueLength;<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>    @Override<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    public byte[] getTagsArray() {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      return CellUtil.cloneTags(this);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    }<a name="line.589"></a>
-<span class="sourceLineNo">590</span><a name="line.590"></a>
-<span class="sourceLineNo">591</span>    @Override<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    public int getTagsOffset() {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      return 0;<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    }<a name="line.594"></a>
-<span class="sourceLineNo">595</span><a name="line.595"></a>
-<span class="sourceLineNo">596</span>    @Override<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    public int getTagsLength() {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      return this.tagsLength;<a name="line.598"></a>
-<span class="sourceLineNo">599</span>    }<a name="line.599"></a>
-<span class="sourceLineNo">600</span><a name="line.600"></a>
-<span class="sourceLineNo">601</span>    @Override<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    public ByteBuffer getRowByteBuffer() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      return this.keyBuffer;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    }<a name="line.604"></a>
-<span class="sourceLineNo">605</span><a name="line.605"></a>
-<span class="sourceLineNo">606</span>    @Override<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    public int getRowPosition() {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      return Bytes.SIZEOF_SHORT;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    @Override<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    public ByteBuffer getFamilyByteBuffer() {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      return this.keyBuffer;<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>    @Override<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    public int getFamilyPosition() {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      return this.familyOffset;<a name="line.618"></a>
-<span class="sourceLineNo">619</span>    }<a name="line.619"></a>
-<span class="sourceLineNo">620</span><a name="line.620"></a>
-<span class="sourceLineNo">621</span>    @Override<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    public ByteBuffer getQualifierByteBuffer() {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      return this.keyBuffer;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    }<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>    @Override<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    public int getQualifierPosition() {<a name="line.627"></a>
-<span class="sourceLineNo">628</span>      return this.qualifierOffset;<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    @Override<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    public ByteBuffer getValueByteBuffer() {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      return this.valueBuffer;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
-<span class="sourceLineNo">635</span><a name="line.635"></a>
-<span class="sourceLineNo">636</span>    @Override<a name="line.636"></a>
-<span class="sourceLineNo">637</span>    public int getValuePosition() {<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this.valueOffset;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    @Override<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    public ByteBuffer getTagsByteBuffer() {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      return this.tagsBuffer;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>    }<a name="line.644"></a>
-<span class="sourceLineNo">645</span><a name="line.645"></a>
-<span class="sourceLineNo">646</span>    @Override<a name="line.646"></a>
-<span class="sourceLineNo">647</span>    public int getTagsPosition() {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      return this.tagsOffset;<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
-<span class="sourceLineNo">650</span><a name="line.650"></a>
-<span class="sourceLineNo">651</span>    @Override<a name="line.651"></a>
-<span class="sourceLineNo">652</span>    public long heapSize() {<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>    @Override<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    public void setSequenceId(long seqId) {<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      this.seqId = seqId;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span><a name="line.660"></a>
-<span class="sourceLineNo">661</span>    @Override<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    public int write(OutputStream out) throws IOException {<a name="line.662"></a>
-<span class="sourceLineNo">663</span>      return write(out, true);<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    }<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    @Override<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          tagsLength, withTags);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      ByteBufferUtils.putInt(out, keyBuffer.capacity());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      // Write key<a name="line.673"></a>
-<span class="sourceLineNo">674</span>      out.write(keyBuffer.array());<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      // Write value<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      ByteBufferUtils.copyBufferToStream(out, this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      if (withTags) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>        // 2 bytes tags length followed by tags bytes<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.681"></a>
-<span class="sourceLineNo">682</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.682"></a>
-<span class="sourceLineNo">683</span>        ByteBufferUtils.copyBufferToStream(out, this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  protected abstract static class<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      BufferedEncodedSeeker&lt;STATE extends SeekerState&gt;<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      implements EncodedSeeker {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    protected HFileBlockDecodingContext decodingCtx;<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    protected final CellComparator comparator;<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    protected ByteBuff currentBuffer;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    protected TagCompressionContext tagCompressionContext = null;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    protected  KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    // many object creations.<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    protected final ObjectIntPair&lt;ByteBuffer&gt; tmpPair = new ObjectIntPair&lt;ByteBuffer&gt;();<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    protected STATE current, previous;<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>    public BufferedEncodedSeeker(CellComparator comparator,<a name="line.702"></a>
-<span class="sourceLineNo">703</span>        HFileBlockDecodingContext decodingCtx) {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      this.comparator = comparator;<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.decodingCtx = decodingCtx;<a name="line.705"></a>
-<span class="sourceLineNo">706</span>      if (decodingCtx.getHFileContext().isCompressTags()) {<a name="line.706"></a>
-<span class="sourceLineNo">707</span>        try {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>          tagCompressionContext = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>        } catch (Exception e) {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>          throw new RuntimeException("Failed to initialize TagCompressionContext", e);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>        }<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      }<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      current = createSeekerState(); // always valid<a name="line.713"></a>
-<span class="sourceLineNo">714</span>      previous = createSeekerState(); // may not be valid<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    }<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>    protected boolean includesMvcc() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>      return this.decodingCtx.getHFileContext().isIncludesMvcc();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    }<a name="line.719"></a>
-<span class="sourceLineNo">720</span><a name="line.720"></a>
-<span class="sourceLineNo">721</span>    protected boolean includesTags() {<a name="line.721"></a>
-<span class="sourceLineNo">722</span>      return this.decodingCtx.getHFileContext().isIncludesTags();<a name="line.722"></a>
-<span class="sourceLineNo">723</span>    }<a name="line.723"></a>
-<span class="sourceLineNo">724</span><a name="line.724"></a>
-<span class="sourceLineNo">725</span>    @Override<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    public int compareKey(CellComparator comparator, Cell key) {<a name="line.726"></a>
-<span class="sourceLineNo">727</span>      keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return comparator.compareKeyIgnoresMvcc(key, keyOnlyKV);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    @Override<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    public void setCurrentBuffer(ByteBuff buffer) {<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      if (this.tagCompressionContext != null) {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        this.tagCompressionContext.clear();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      }<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      currentBuffer = buffer;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>      current.currentBuffer = currentBuffer;<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      if(tagCompressionContext != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>        current.tagCompressionContext = tagCompressionContext;<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      }<a name="line.740"></a>
-<span class="sourceLineNo">741</span>      decodeFirst();<a name="line.741"></a>
-<span class="sourceLineNo">742</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      previous.invalidate();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>    @Override<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    public Cell getKey() {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      byte[] key = new byte[current.keyLength];<a name="line.748"></a>
-<span class="sourceLineNo">749</span>      System.arraycopy(current.keyBuffer, 0, key, 0, current.keyLength);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return new KeyValue.KeyOnlyKeyValue(key);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public ByteBuffer getValueShallowCopy() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      currentBuffer.asSubByteBuffer(current.valueOffset, current.valueLength, tmpPair);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>      ByteBuffer dup = tmpPair.getFirst().duplicate();<a name="line.756"></a>
-<span class="sourceLineNo">757</span>      dup.position(tmpPair.getSecond());<a name="line.757"></a>
-<span class="sourceLineNo">758</span>      dup.limit(tmpPair.getSecond() + current.valueLength);<a name="line.758"></a>
-<span class="sourceLineNo">759</span>      return dup.slice();<a name="line.759"></a>
-<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
-<span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    @Override<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    public Cell getCell() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      return current.toCell();<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    }<a name="line.765"></a>
-<span class="sourceLineNo">766</span><a name="line.766"></a>
-<span class="sourceLineNo">767</span>    @Override<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    public void rewind() {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      currentBuffer.rewind();<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      if (tagCompressionContext != null) {<a name="line.770"></a>
-<span class="sourceLineNo">771</span>        tagCompressionContext.clear();<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      }<a name="line.772"></a>
-<span class="sourceLineNo">773</span>      decodeFirst();<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      previous.invalidate();<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public boolean next() {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      if (!currentBuffer.hasRemaining()) {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        return false;<a name="line.781"></a>
-<span class="sourceLineNo">782</span>      }<a name="line.782"></a>
-<span class="sourceLineNo">783</span>      decodeNext();<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      previous.invalidate();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>      return true;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span><a name="line.788"></a>
-<span class="sourceLineNo">789</span>    protected void decodeTags() {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      current.tagsLength = ByteBuff.readCompressedInt(currentBuffer);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      if (tagCompressionContext != null) {<a name="line.791"></a>
-<span class="sourceLineNo">792</span>        if (current.uncompressTags) {<a name="line.792"></a>
-<span class="sourceLineNo">793</span>          // Tag compression is been used. uncompress it into tagsBuffer<a name="line.793"></a>
-<span class="sourceLineNo">794</span>          current.ensureSpaceForTags();<a name="line.794"></a>
-<span class="sourceLineNo">795</span>          try {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>            current.tagsCompressedLength = tagCompressionContext.uncompressTags(currentBuffer,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>                current.tagsBuffer, 0, current.tagsLength);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>          } catch (IOException e) {<a name="line.798"></a>
-<span class="sourceLineNo">799</span>            throw new RuntimeException("Exception while uncompressing tags", e);<a name="line.799"></a>
-<span class="sourceLineNo">800</span>          }<a name="line.800"></a>
-<span class="sourceLineNo">801</span>        } else {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>          currentBuffer.skip(current.tagsCompressedLength);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>          current.uncompressTags = true;// Reset this.<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        }<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        current.tagsOffset = -1;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>      } else {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>        // When tag compress is not used, let us not do copying of tags bytes into tagsBuffer.<a name="line.807"></a>
-<span class="sourceLineNo">808</span>        // Just mark the tags Offset so as to create the KV buffer later in getKeyValueBuffer()<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        current.tagsOffset = currentBuffer.position();<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        currentBuffer.skip(current.tagsLength);<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      }<a name="line.811"></a>
-<span class="sourceLineNo">812</span>    }<a name="line.812"></a>
-<span class="sourceLineNo">813</span><a name="line.813"></a>
-<span class="sourceLineNo">814</span>    @Override<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    public int seekToKeyInBlock(Cell seekCell, boolean seekBefore) {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      int rowCommonPrefix = 0;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      int familyCommonPrefix = 0;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      int qualCommonPrefix = 0;<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      previous.invalidate();<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      do {<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        int comp;<a name="line.821"></a>
-<span class="sourceLineNo">822</span>        keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.822"></a>
-<span class="sourceLineNo">823</span>        if (current.lastCommonPrefix != 0) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>          // The KV format has row key length also in the byte array. The<a name="line.824"></a>
-<span class="sourceLineNo">825</span>          // common prefix<a name="line.825"></a>
-<span class="sourceLineNo">826</span>          // includes it. So we need to subtract to find out the common prefix<a name="line.826"></a>
-<span class="sourceLineNo">827</span>          // in the<a name="line.827"></a>
-<span class="sourceLineNo">828</span>          // row part alone<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          rowCommonPrefix = Math.min(rowCommonPrefix, current.lastCommonPrefix - 2);<a name="line.829"></a>
-<span class="sourceLineNo">830</span>        }<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        if (current.lastCommonPrefix &lt;= 2) {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>          rowCommonPrefix = 0;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>        }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>        rowCommonPrefix += findCommonPrefixInRowPart(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>        comp = compareCommonRowPrefix(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>        if (comp == 0) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          comp = compareTypeBytes(seekCell, keyOnlyKV);<a name="line.837"></a>
-<span class="sourceLineNo">838</span>          if (comp == 0) {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>            // Subtract the fixed row key length and the family key fixed length<a name="line.839"></a>
-<span class="sourceLineNo">840</span>            familyCommonPrefix = Math.max(<a name="line.840"></a>
-<span class="sourceLineNo">841</span>                0,<a name="line.841"></a>
-<span class="sourceLineNo">842</span>                Math.min(familyCommonPrefix,<a name="line.842"></a>
-<span class="sourceLineNo">843</span>                    current.lastCommonPrefix - (3 + keyOnlyKV.getRowLength())));<a name="line.843"></a>
-<span class="sourceLineNo">844</span>            familyCommonPrefix += findCommonPrefixInFamilyPart(seekCell, keyOnlyKV,<a name="line.844"></a>
-<span class="sourceLineNo">845</span>                familyCommonPrefix);<a name="line.845"></a>
-<span class="sourceLineNo">846</span>            comp = compareCommonFamilyPrefix(seekCell, keyOnlyKV, familyCommonPrefix);<a name="line.846"></a>
-<span class="sourceLineNo">847</span>            if (comp == 0) {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>              // subtract the rowkey fixed length and the family key fixed<a name="line.848"></a>
-<span class="sourceLineNo">849</span>              // length<a name="line.849"></a>
-<span class="sourceLineNo">850</span>              qualCommonPrefix = Math.max(<a name="line.850"></a>
-<span class="sourceLineNo">851</span>                  0,<a name="line.851"></a>
-<span class="sourceLineNo">852</span>                  Math.min(<a name="line.852"></a>
-<span class="sourceLineNo">853</span>                      qualCommonPrefix,<a name="line.853"></a>
-<span class="sourceLineNo">854</span>                      current.lastCommonPrefix<a name="line.854"></a>
-<span class="sourceLineNo">855</span>                          - (3 + keyOnlyKV.getRowLength() + keyOnlyKV.getFamilyLength())));<a name="line.855"></a>
-<span class="sourceLineNo">856</span>              qualCommonPrefix += findCommonPrefixInQualifierPart(seekCell, keyOnlyKV,<a name="line.856"></a>
-<span class="sourceLineNo">857</span>                  qualCommonPrefix);<a name="line.857"></a>
-<span class="sourceLineNo">858</span>              comp = compareCommonQualifierPrefix(seekCell, keyOnlyKV, qualCommonPrefix);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>              if (comp == 0) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>                comp = CellComparator.compareTimestamps(seekCell, keyOnlyKV);<a name="line.860"></a>
-<span class="sourceLineNo">861</span>                if (comp == 0) {<a name="line.861"></a>
-<span class="sourceLineNo">862</span>                  // Compare types. Let the delete types sort ahead of puts;<a name="line.862"></a>
-<span class="sourceLineNo">863</span>                  // i.e. types<a name="line.863"></a>
-<span class="sourceLineNo">864</span>                  // of higher numbers sort before those of lesser numbers.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>                  // Maximum<a name="line.865"></a>
-<span class="sourceLineNo">866</span>                  // (255)<a name="line.866"></a>
-<span class="sourceLineNo">867</span>                  // appears ahead of everything, and minimum (0) appears<a name="line.867"></a>
-<span class="sourceLineNo">868</span>                  // after<a name="line.868"></a>
-<span class="sourceLineNo">869</span>                  // everything.<a name="line.869"></a>
-<span class="sourceLineNo">870</span>                  comp = (0xff &amp; keyOnlyKV.getTypeByte()) - (0xff &amp; seekCell.getTypeByte());<a name="line.870"></a>
-<span class="sourceLineNo">871</span>                }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>              }<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            }<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          }<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>        if (comp == 0) { // exact match<a name="line.876"></a>
-<span class="sourceLineNo">877</span>          if (seekBefore) {<a name="line.877"></a>
-<span class="sourceLineNo">878</span>            if (!previous.isValid()) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>              // The caller (seekBefore) has to ensure that we are not at the<a name="line.879"></a>
-<span class="sourceLineNo">880</span>              // first key in the block.<a name="line.880"></a>
-<span class="sourceLineNo">881</span>              throw new IllegalStateException("Cannot seekBefore if "<a name="line.881"></a>
-<span class="sourceLineNo">882</span>                  + "positioned at the first key in the block: key="<a name="line.882"></a>
-<span class="sourceLineNo">883</span>                  + Bytes.toStringBinary(seekCell.getRowArray()));<a name="line.883"></a>
-<span class="sourceLineNo">884</span>            }<a name="line.884"></a>
-<span class="sourceLineNo">885</span>            moveToPrevious();<a name="line.885"></a>
-<span class="sourceLineNo">886</span>            return 1;<a name="line.886"></a>
-<span class="sourceLineNo">887</span>          }<a name="line.887"></a>
-<span class="sourceLineNo">888</span>          return 0;<a name="line.888"></a>
-<span class="sourceLineNo">889</span>        }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>        if (comp &lt; 0) { // already too large, check previous<a name="line.891"></a>
-<span class="sourceLineNo">892</span>          if (previous.isValid()) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>            moveToPrevious();<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          } else {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>            return HConstants.INDEX_KEY_MAGIC; // using optimized index key<a name="line.895"></a>
-<span class="sourceLineNo">896</span>          }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>          return 1;<a name="line.897"></a>
-<span class="sourceLineNo">898</span>        }<a name="line.898"></a>
-<span class="sourceLineNo">899</span><a name="line.899"></a>
-<span class="sourceLineNo">900</span>        // move to next, if more data is available<a name="line.900"></a>
-<span class="sourceLineNo">901</span>        if (currentBuffer.hasRemaining()) {<a name="line.901"></a>
-<span class="sourceLineNo">902</span>          previous.copyFromNext(current);<a name="line.902"></a>
-<span class="sourceLineNo">903</span>          decodeNext();<a name="line.903"></a>
-<span class="sourceLineNo">904</span>          current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.904"></a>
-<span class="sourceLineNo">905</span>        } else {<a name="line.905"></a>
-<span class="sourceLineNo">906</span>          break;<a name="line.906"></a>
-<span class="sourceLineNo">907</span>        }<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      } while (true);<a name="line.908"></a>
-<span class="sourceLineNo">909</span><a name="line.909"></a>
-<span class="sourceLineNo">910</span>      // we hit the end of the block, not an exact match<a name="line.910"></a>
-<span class="sourceLineNo">911</span>      return 1;<a name="line.911"></a>
-<span class="sourceLineNo">912</span>    }<a name="line.912"></a>
-<span class="sourceLineNo">913</span><a name="line.913"></a>
-<span class="sourceLineNo">914</span>    private int compareTypeBytes(Cell key, Cell right) {<a name="line.914"></a>
-<span class="sourceLineNo">915</span>      if (key.getFamilyLength() + key.getQualifierLength() == 0<a name="line.915"></a>
-<span class="sourceLineNo">916</span>          &amp;&amp; key.getTypeByte() == Type.Minimum.getCode()) {<a name="line.916"></a>
-<span class="sourceLineNo">917</span>        // left is "bigger", i.e. it appears later in the sorted order<a name="line.917"></a>
-<span class="sourceLineNo">918</span>        return 1;<a name="line.918"></a>
-<span class="sourceLineNo">919</span>      }<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      if (right.getFamilyLength() + right.getQualifierLength() == 0<a name="line.920"></a>
-<span class="sourceLineNo">921</span>          &amp;&amp; right.getTypeByte() == Type.Minimum.getCode()) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>        return -1;<a name="line.922"></a>
-<span class="sourceLineNo">923</span>      }<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      return 0;<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span><a name="line.926"></a>
-<span class="sourceLineNo">927</span>    private static int findCommonPrefixInRowPart(Cell left, Cell right, int rowCommonPrefix) {<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      return Bytes.findCommonPrefix(left.getRowArray(), right.getRowArray(), left.getRowLength()<a name="line.928"></a>
-<span class="sourceLineNo">929</span>          - rowCommonPrefix, right.getRowLength() - rowCommonPrefix, left.getRowOffset()<a name="line.929"></a>
-<span class="sourceLineNo">930</span>          + rowCommonPrefix, right.getRowOffset() + rowCommonPrefix);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    }<a name="line.931"></a>
-<span class="sourceLineNo">932</span><a name="line.932"></a>
-<span class="sourceLineNo">933</span>    private static int findCommonPrefixInFamilyPart(Cell left, Cell right, int familyCommonPrefix) {<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      return Bytes<a name="line.934"></a>
-<span class="sourceLineNo">935</span>          .findCommonPrefix(left.getFamilyArray(), right.getFamilyArray(), left.getFamilyLength()<a name="line.935"></a>
-<span class="sourceLineNo">936</span>              - familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix,<a name="line.936"></a>
-<span class="sourceLineNo">937</span>              left.getFamilyOffset() + familyCommonPrefix, right.getFamilyOffset()<a name="line.937"></a>
-<span class="sourceLineNo">938</span>                  + familyCommonPrefix);<a name="line.938"></a>
-<span class="sourceLineNo">939</span>    }<a name="line.939"></a>
-<span class="sourceLineNo">940</span><a name="line.940"></a>
-<span class="sourceLineNo">941</span>    private static int findCommonPrefixInQualifierPart(Cell left, Cell right,<a name="line.941"></a>
-<span class="sourceLineNo">942</span>        int qualifierCommonPrefix) {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      return Bytes.findCommonPrefix(left.getQualifierArray(), right.getQualifierArray(),<a name="line.943"></a>
-<span class="sourceLineNo">944</span>          left.getQualifierLength() - qualifierCommonPrefix, right.getQualifierLength()<a name="line.944"></a>
-<span class="sourceLineNo">945</span>              - qualifierCommonPrefix, left.getQualifierOffset() + qualifierCommonPrefix,<a name="line.945"></a>
-<span class="sourceLineNo">946</span>          right.getQualifierOffset() + qualifierCommonPrefix);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    private void moveToPrevious() {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!previous.isValid()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        throw new IllegalStateException(<a name="line.951"></a>
-<span class="sourceLineNo">952</span>            "Can move back only once and not in first key in the block.");<a name="line.952"></a>
-<span class="sourceLineNo">953</span>      }<a name="line.953"></a>
-<span class="sourceLineNo">954</span><a name="line.954"></a>
-<span class="sourceLineNo">955</span>      STATE tmp = previous;<a name="line.955"></a>
-<span class="sourceLineNo">956</span>      previous = current;<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      current = tmp;<a name="line.957"></a>
-<span class="sourceLineNo">958</span><a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // move after last key value<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      currentBuffer.position(current.nextKvOffset);<a name="line.960"></a>
-<span class="sou

<TRUNCATED>
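
The write(OutputStream, boolean) override in the HFileBlock seeker source above frames a cell as a 4-byte total length, a 4-byte key length, a 4-byte value length, the key bytes, the value bytes, and, when tags are requested, a 2-byte tag length followed by the tag bytes (the tag length is non-negative, so two bytes are enough; see HBASE-11437). A minimal, self-contained sketch of that framing using only the JDK follows; the class and method names are invented for illustration and are not HBase API.

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Hypothetical illustration of the cell framing used by write(OutputStream, boolean) above.
    // Layout: [total len][key len][value len][key bytes][value bytes][2-byte tag len][tag bytes].
    public final class CellFrameSketch {
      static int writeCell(DataOutputStream out, byte[] key, byte[] value,
          byte[] tags, boolean withTags) throws IOException {
        int total = 2 * Integer.BYTES + key.length + value.length
            + (withTags ? 2 + tags.length : 0);  // plays the role of KeyValueUtil.length(...)
        out.writeInt(total);                     // leading total length
        out.writeInt(key.length);                // key length
        out.writeInt(value.length);              // value length
        out.write(key);                          // key bytes
        out.write(value);                        // value bytes
        if (withTags) {
          out.writeShort(tags.length);           // 2-byte tag length, high byte first (HBASE-11437)
          out.write(tags);                       // tag bytes
        }
        return total + Integer.BYTES;            // bytes written, including the leading length field
      }

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        int n = writeCell(new DataOutputStream(bos),
            "key".getBytes(), "value".getBytes(), new byte[0], false);
        System.out.println(n + " bytes framed; stream holds " + bos.size());
      }
    }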

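Similarly, the seekToKeyInBlock(Cell, boolean) loop above avoids re-comparing bytes it has already matched: it keeps rowCommonPrefix, familyCommonPrefix and qualCommonPrefix across iterations, shrinks each one when the block's stored lastCommonPrefix shows the new key diverged earlier, extends it with Bytes.findCommonPrefix, and then compares only the remaining suffix. The sketch below shows the same idea for a single byte-array component; the helper names are invented for illustration and are not the HBase ones.

    import java.nio.charset.StandardCharsets;

    // Hypothetical sketch of the prefix-reuse comparison used by seekToKeyInBlock above.
    public final class PrefixSeekSketch {
      // Extend an already-known common prefix of a and b by scanning from 'known'.
      static int extendCommonPrefix(byte[] a, byte[] b, int known) {
        int n = Math.min(a.length, b.length);
        int i = known;
        while (i < n && a[i] == b[i]) {
          i++;
        }
        return i;
      }

      // Compare a and b lexicographically, skipping the first 'prefix' bytes known to be equal.
      static int compareAfterPrefix(byte[] a, byte[] b, int prefix) {
        int n = Math.min(a.length, b.length);
        for (int i = prefix; i < n; i++) {
          int d = (a[i] & 0xff) - (b[i] & 0xff);   // unsigned byte comparison
          if (d != 0) {
            return d;
          }
        }
        return a.length - b.length;                // shorter key sorts first
      }

      public static void main(String[] args) {
        byte[] seek = "row-00042".getBytes(StandardCharsets.UTF_8);
        byte[][] block = {
            "row-00010".getBytes(StandardCharsets.UTF_8),
            "row-00041".getBytes(StandardCharsets.UTF_8),
            "row-00042".getBytes(StandardCharsets.UTF_8) };
        int prefix = 0;
        byte[] prev = null;
        for (byte[] cur : block) {
          if (prev != null) {
            // Shrink to what is still guaranteed shared with the new key. In the encoded block
            // this value is stored as lastCommonPrefix, so no extra scan is needed there.
            prefix = Math.min(prefix, extendCommonPrefix(prev, cur, 0));
          }
          prefix = extendCommonPrefix(seek, cur, prefix);  // reuse bytes already matched
          int cmp = compareAfterPrefix(seek, cur, prefix);
          System.out.println(new String(cur, StandardCharsets.UTF_8)
              + " -> cmp=" + cmp + ", commonPrefix=" + prefix);
          if (cmp <= 0) {
            break;                                         // reached or passed the seek key
          }
          prev = cur;
        }
      }
    }
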
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
index de0d003..ea9c5c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
@@ -73,1205 +73,1207 @@
 <span class="sourceLineNo">065</span>  public static final byte[] RPC_HEADER = new byte[] { 'H', 'B', 'a', 's' };<a name="line.65"></a>
 <span class="sourceLineNo">066</span>  public static final byte RPC_CURRENT_VERSION = 0;<a name="line.66"></a>
 <span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  // HFileBlock constants.<a name="line.68"></a>
-<span class="sourceLineNo">069</span><a name="line.69"></a>
-<span class="sourceLineNo">070</span>  /** The size data structures with minor version is 0 */<a name="line.70"></a>
-<span class="sourceLineNo">071</span>  public static final int HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM = MAGIC_LENGTH + 2 * Bytes.SIZEOF_INT<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      + Bytes.SIZEOF_LONG;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  /** The size of a version 2 HFile block header, minor version 1.<a name="line.73"></a>
-<span class="sourceLineNo">074</span>   * There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum<a name="line.74"></a>
-<span class="sourceLineNo">075</span>   * followed by another 4 byte value to store sizeofDataOnDisk.<a name="line.75"></a>
-<span class="sourceLineNo">076</span>   */<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  public static final int HFILEBLOCK_HEADER_SIZE = HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM +<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    Bytes.SIZEOF_BYTE + 2 * Bytes.SIZEOF_INT;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  /** Just an array of bytes of the right size. */<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  public static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HFILEBLOCK_HEADER_SIZE];<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  //End HFileBlockConstants.<a name="line.82"></a>
+<span class="sourceLineNo">068</span>  // HFileBlock constants. TODO!!!! THESE DEFINES BELONG IN HFILEBLOCK, NOT UP HERE.<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  // Needed down in hbase-common though by encoders but these encoders should not be dealing<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  // in the internals of hfileblocks. Fix encapsulation.<a name="line.70"></a>
+<span class="sourceLineNo">071</span><a name="line.71"></a>
+<span class="sourceLineNo">072</span>  /** The size data structures with minor version is 0 */<a name="line.72"></a>
+<span class="sourceLineNo">073</span>  public static final int HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM = MAGIC_LENGTH + 2 * Bytes.SIZEOF_INT<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      + Bytes.SIZEOF_LONG;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  /** The size of a version 2 HFile block header, minor version 1.<a name="line.75"></a>
+<span class="sourceLineNo">076</span>   * There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum<a name="line.76"></a>
+<span class="sourceLineNo">077</span>   * followed by another 4 byte value to store sizeofDataOnDisk.<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   */<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  public static final int HFILEBLOCK_HEADER_SIZE = HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM +<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    Bytes.SIZEOF_BYTE + 2 * Bytes.SIZEOF_INT;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  /** Just an array of bytes of the right size. */<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HFILEBLOCK_HEADER_SIZE];<a name="line.82"></a>
 <span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   * Status codes used for return values of bulk operations.<a name="line.85"></a>
-<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  @InterfaceAudience.Private<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  public enum OperationStatusCode {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    NOT_RUN,<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    SUCCESS,<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    BAD_FAMILY,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    SANITY_CHECK_FAILURE,<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    FAILURE;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  /** long constant for zero */<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  public static final Long ZERO_L = Long.valueOf(0L);<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static final String NINES = "99999999999999";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final String ZEROES = "00000000000000";<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  // For migration<a name="line.101"></a>
+<span class="sourceLineNo">084</span>  //End HFileBlockConstants.<a name="line.84"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>  /**<a name="line.86"></a>
+<span class="sourceLineNo">087</span>   * Status codes used for return values of bulk operations.<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   */<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  @InterfaceAudience.Private<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  public enum OperationStatusCode {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    NOT_RUN,<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    SUCCESS,<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    BAD_FAMILY,<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    SANITY_CHECK_FAILURE,<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    FAILURE;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  }<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  /** long constant for zero */<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  public static final Long ZERO_L = Long.valueOf(0L);<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  public static final String NINES = "99999999999999";<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  public static final String ZEROES = "00000000000000";<a name="line.101"></a>
 <span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  /** name of version file */<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  public static final String VERSION_FILE_NAME = "hbase.version";<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  /**<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * Current version of file system.<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * Version 4 supports only one kind of bloom filter.<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   * Version 5 changes versions in catalog table regions.<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * Version 6 enables blockcaching on catalog tables.<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * Version 7 introduces hfile -- hbase 0.19 to 0.20..<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   * Version 8 introduces namespace<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   */<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  // public static final String FILE_SYSTEM_VERSION = "6";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  public static final String FILE_SYSTEM_VERSION = "8";<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  // Configuration parameters<a name="line.117"></a>
+<span class="sourceLineNo">103</span>  // For migration<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>  /** name of version file */<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  public static final String VERSION_FILE_NAME = "hbase.version";<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>  /**<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * Current version of file system.<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * Version 4 supports only one kind of bloom filter.<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   * Version 5 changes versions in catalog table regions.<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Version 6 enables blockcaching on catalog tables.<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * Version 7 introduces hfile -- hbase 0.19 to 0.20..<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * Version 8 introduces namespace<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   */<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  // public static final String FILE_SYSTEM_VERSION = "6";<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  public static final String FILE_SYSTEM_VERSION = "8";<a name="line.117"></a>
 <span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  //TODO: Is having HBase homed on port 60k OK?<a name="line.119"></a>
+<span class="sourceLineNo">119</span>  // Configuration parameters<a name="line.119"></a>
 <span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  /** Cluster is in distributed mode or not */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  public static final String CLUSTER_DISTRIBUTED = "hbase.cluster.distributed";<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  /** Config for pluggable load balancers */<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  public static final String HBASE_MASTER_LOADBALANCER_CLASS = "hbase.master.loadbalancer.class";<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  /** Config for balancing the cluster by table */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  public static final String HBASE_MASTER_LOADBALANCE_BYTABLE = "hbase.master.loadbalance.bytable";<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /** The name of the ensemble table */<a name="line.130"></a>
-<span class="sourceLineNo">131</span>  public static final String ENSEMBLE_TABLE_NAME = "hbase:ensemble";<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>  /** Config for pluggable region normalizer */<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  public static final String HBASE_MASTER_NORMALIZER_CLASS =<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    "hbase.master.normalizer.class";<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  /** Cluster is standalone or pseudo-distributed */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static final boolean CLUSTER_IS_LOCAL = false;<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>  /** Cluster is fully-distributed */<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  public static final boolean CLUSTER_IS_DISTRIBUTED = true;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>  /** Default value for cluster distributed mode */<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  public static final boolean DEFAULT_CLUSTER_DISTRIBUTED = CLUSTER_IS_LOCAL;<a name="line.144"></a>
-<span class="sourceLineNo">145</span><a name="line.145"></a>
-<span class="sourceLineNo">146</span>  /** default host address */<a name="line.146"></a>
-<span class="sourceLineNo">147</span>  public static final String DEFAULT_HOST = "0.0.0.0";<a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  /** Parameter name for port master listens on. */<a name="line.149"></a>
-<span class="sourceLineNo">150</span>  public static final String MASTER_PORT = "hbase.master.port";<a name="line.150"></a>
-<span class="sourceLineNo">151</span><a name="line.151"></a>
-<span class="sourceLineNo">152</span>  /** default port that the master listens on */<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  public static final int DEFAULT_MASTER_PORT = 16000;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  /** default port for master web api */<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  public static final int DEFAULT_MASTER_INFOPORT = 16010;<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>  /** Configuration key for master web API port */<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  public static final String MASTER_INFO_PORT = "hbase.master.info.port";<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>  /** Parameter name for the master type being backup (waits for primary to go inactive). */<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>  /**<a name="line.164"></a>
-<span class="sourceLineNo">165</span>   * by default every master is a possible primary master unless the conf explicitly overrides it<a name="line.165"></a>
-<span class="sourceLineNo">166</span>   */<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  /** Name of ZooKeeper quorum configuration parameter. */<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  public static final String ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  /** Common prefix of ZooKeeper configuration properties */<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  public static final String ZK_CFG_PROPERTY_PREFIX =<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      "hbase.zookeeper.property.";<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  public static final int ZK_CFG_PROPERTY_PREFIX_LEN =<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      ZK_CFG_PROPERTY_PREFIX.length();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  /**<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   * The ZK client port key in the ZK properties map. The name reflects the<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * fact that this is not an HBase configuration key.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   */<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  public static final String CLIENT_PORT_STR = "clientPort";<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /** Parameter name for the client port that the zookeeper listens on */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  public static final String ZOOKEEPER_CLIENT_PORT =<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      ZK_CFG_PROPERTY_PREFIX + CLIENT_PORT_STR;<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  /** Default client port that the zookeeper listens on */<a name="line.189"></a>
-<span class="sourceLineNo">190</span>  public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;<a name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>  /**<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * Parameter name for the wait time for the recoverable zookeeper<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      "hbase.zookeeper.recoverable.waittime";<a name="line.196"></a>
-<span class="sourceLineNo">197</span><a name="line.197"></a>
-<span class="sourceLineNo">198</span>  /** Default wait time for the recoverable zookeeper */<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  /** Parameter name for the root dir in ZK for this cluster */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  public static final String ZOOKEEPER_ZNODE_PARENT = "zookeeper.znode.parent";<a name="line.202"></a>
-<span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>  public static final String DEFAULT_ZOOKEEPER_ZNODE_PARENT = "/hbase";<a name="line.204"></a>
+<span class="sourceLineNo">121</span>  //TODO: Is having HBase homed on port 60k OK?<a name="line.121"></a>
+<span class="sourceLineNo">122</span><a name="line.122"></a>
+<span class="sourceLineNo">123</span>  /** Cluster is in distributed mode or not */<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  public static final String CLUSTER_DISTRIBUTED = "hbase.cluster.distributed";<a name="line.124"></a>
+<span class="sourceLineNo">125</span><a name="line.125"></a>
+<span class="sourceLineNo">126</span>  /** Config for pluggable load balancers */<a name="line.126"></a>
+<span class="sourceLineNo">127</span>  public static final String HBASE_MASTER_LOADBALANCER_CLASS = "hbase.master.loadbalancer.class";<a name="line.127"></a>
+<span class="sourceLineNo">128</span><a name="line.128"></a>
+<span class="sourceLineNo">129</span>  /** Config for balancing the cluster by table */<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  public static final String HBASE_MASTER_LOADBALANCE_BYTABLE = "hbase.master.loadbalance.bytable";<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /** The name of the ensemble table */<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  public static final String ENSEMBLE_TABLE_NAME = "hbase:ensemble";<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  /** Config for pluggable region normalizer */<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  public static final String HBASE_MASTER_NORMALIZER_CLASS =<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    "hbase.master.normalizer.class";<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  /** Cluster is standalone or pseudo-distributed */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static final boolean CLUSTER_IS_LOCAL = false;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>  /** Cluster is fully-distributed */<a name="line.142"></a>
+<span class="sourceLineNo">143</span>  public static final boolean CLUSTER_IS_DISTRIBUTED = true;<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>  /** Default value for cluster distributed mode */<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  public static final boolean DEFAULT_CLUSTER_DISTRIBUTED = CLUSTER_IS_LOCAL;<a name="line.146"></a>
+<span class="sourceLineNo">147</span><a name="line.147"></a>
+<span class="sourceLineNo">148</span>  /** default host address */<a name="line.148"></a>
+<span class="sourceLineNo">149</span>  public static final String DEFAULT_HOST = "0.0.0.0";<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>  /** Parameter name for port master listens on. */<a name="line.151"></a>
+<span class="sourceLineNo">152</span>  public static final String MASTER_PORT = "hbase.master.port";<a name="line.152"></a>
+<span class="sourceLineNo">153</span><a name="line.153"></a>
+<span class="sourceLineNo">154</span>  /** default port that the master listens on */<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  public static final int DEFAULT_MASTER_PORT = 16000;<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  /** default port for master web api */<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  public static final int DEFAULT_MASTER_INFOPORT = 16010;<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>  /** Configuration key for master web API port */<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  public static final String MASTER_INFO_PORT = "hbase.master.info.port";<a name="line.161"></a>
+<span class="sourceLineNo">162</span><a name="line.162"></a>
+<span class="sourceLineNo">163</span>  /** Parameter name for the master type being backup (waits for primary to go inactive). */<a name="line.163"></a>
+<span class="sourceLineNo">164</span>  public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span>  /**<a name="line.166"></a>
+<span class="sourceLineNo">167</span>   * by default every master is a possible primary master unless the conf explicitly overrides it<a name="line.167"></a>
+<span class="sourceLineNo">168</span>   */<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;<a name="line.169"></a>
+<span class="sourceLineNo">170</span><a name="line.170"></a>
+<span class="sourceLineNo">171</span>  /** Name of ZooKeeper quorum configuration parameter. */<a name="line.171"></a>
+<span class="sourceLineNo">172</span>  public static final String ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";<a name="line.172"></a>
+<span class="sourceLineNo">173</span><a name="line.173"></a>
+<span class="sourceLineNo">174</span>  /** Common prefix of ZooKeeper configuration properties */<a name="line.174"></a>
+<span class="sourceLineNo">175</span>  public static final String ZK_CFG_PROPERTY_PREFIX =<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      "hbase.zookeeper.property.";<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  public static final int ZK_CFG_PROPERTY_PREFIX_LEN =<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      ZK_CFG_PROPERTY_PREFIX.length();<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  /**<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   * The ZK client port key in the ZK properties map. The name reflects the<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   * fact that this is not an HBase configuration key.<a name="line.183"></a>
+<span class="sourceLineNo">184</span>   */<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  public static final String CLIENT_PORT_STR = "clientPort";<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>  /** Parameter name for the client port that the zookeeper listens on */<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  public static final String ZOOKEEPER_CLIENT_PORT =<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      ZK_CFG_PROPERTY_PREFIX + CLIENT_PORT_STR;<a name="line.189"></a>
+<span class="sourceLineNo">190</span><a name="line.190"></a>
+<span class="sourceLineNo">191</span>  /** Default client port that the zookeeper listens on */<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;<a name="line.192"></a>
+<span class="sourceLineNo">193</span><a name="line.193"></a>
+<span class="sourceLineNo">194</span>  /**<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * Parameter name for the wait time for the recoverable zookeeper<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   */<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      "hbase.zookeeper.recoverable.waittime";<a name="line.198"></a>
+<span class="sourceLineNo">199</span><a name="line.199"></a>
+<span class="sourceLineNo">200</span>  /** Default wait time for the recoverable zookeeper */<a name="line.200"></a>
+<span class="sourceLineNo">201</span>  public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>  /** Parameter name for the root dir in ZK for this cluster */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  public static final String ZOOKEEPER_ZNODE_PARENT = "zookeeper.znode.parent";<a name="line.204"></a>
 <span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Parameter name for the limit on concurrent client-side zookeeper<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * connections<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  public static final String ZOOKEEPER_MAX_CLIENT_CNXNS =<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      ZK_CFG_PROPERTY_PREFIX + "maxClientCnxns";<a name="line.211"></a>
-<span class="sourceLineNo">212</span><a name="line.212"></a>
-<span class="sourceLineNo">213</span>  /** Parameter name for the ZK data directory */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  public static final String ZOOKEEPER_DATA_DIR =<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      ZK_CFG_PROPERTY_PREFIX + "dataDir";<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  /** Parameter name for the ZK tick time */<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final String ZOOKEEPER_TICK_TIME =<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      ZK_CFG_PROPERTY_PREFIX + "tickTime";<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>  /** Default limit on concurrent client-side zookeeper connections */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  public static final int DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS = 300;<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /** Configuration key for ZooKeeper session timeout */<a name="line.224"></a>
-<span class="sourceLineNo">225</span>  public static final String ZK_SESSION_TIMEOUT = "zookeeper.session.timeout";<a name="line.225"></a>
-<span class="sourceLineNo">226</span><a name="line.226"></a>
-<span class="sourceLineNo">227</span>  /** Default value for ZooKeeper session timeout */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  public static final int DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  /** Configuration key for whether to use ZK.multi */<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  public static final String ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Parameter name for port region server listens on. */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  public static final String REGIONSERVER_PORT = "hbase.regionserver.port";<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Default port region server listens on. */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  public static final int DEFAULT_REGIONSERVER_PORT = 16020;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** default port for region server web api */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  public static final int DEFAULT_REGIONSERVER_INFOPORT = 16030;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** A configuration key for regionserver info port */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  public static final String REGIONSERVER_INFO_PORT =<a name="line.243"></a>
-<span class="sourceLineNo">244</span>    "hbase.regionserver.info.port";<a name="line.244"></a>
-<span class="sourceLineNo">245</span><a name="line.245"></a>
-<span class="sourceLineNo">246</span>  /** A flag that enables automatic selection of regionserver info port */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static final String REGIONSERVER_INFO_PORT_AUTO =<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      REGIONSERVER_INFO_PORT + ".auto";<a name="line.248"></a>
-<span class="sourceLineNo">249</span><a name="line.249"></a>
-<span class="sourceLineNo">250</span>  /** Parameter name for what region server implementation to use. */<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public static final String REGION_SERVER_IMPL= "hbase.regionserver.impl";<a name="line.251"></a>
-<span class="sourceLineNo">252</span><a name="line.252"></a>
-<span class="sourceLineNo">253</span>  /** Parameter name for what master implementation to use. */<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  public static final String MASTER_IMPL= "hbase.master.impl";<a name="line.254"></a>
-<span class="sourceLineNo">255</span><a name="line.255"></a>
-<span class="sourceLineNo">256</span>  /** Parameter name for what hbase client implementation to use. */<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  public static final String HBASECLIENT_IMPL= "hbase.hbaseclient.impl";<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>  /** Parameter name for how often threads should wake up */<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  public static final String THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  /** Default value for thread wake frequency */<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  public static final int DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;<a name="line.263"></a>
-<span class="sourceLineNo">264</span><a name="line.264"></a>
-<span class="sourceLineNo">265</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>  public static final String VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.268"></a>
-<span class="sourceLineNo">269</span>  public static final int DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>  /** Parameter name for how often a region should should perform a major compaction */<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  public static final String MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>  /** Parameter name for the maximum batch of KVs to be used in flushes and compactions */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  public static final String COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max";<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  public static final int COMPACTION_KV_MAX_DEFAULT = 10;<a name="line.276"></a>
-<span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span>  /** Parameter name for HBase instance root directory */<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  public static final String HBASE_DIR = "hbase.rootdir";<a name="line.279"></a>
-<span class="sourceLineNo">280</span><a name="line.280"></a>
-<span class="sourceLineNo">281</span>  /** Parameter name for HBase client IPC pool type */<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  public static final String HBASE_CLIENT_IPC_POOL_TYPE = "hbase.client.ipc.pool.type";<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /** Parameter name for HBase client IPC pool size */<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  public static final String HBASE_CLIENT_IPC_POOL_SIZE = "hbase.client.ipc.pool.size";<a name="line.285"></a>
-<span class="sourceLineNo">286</span><a name="line.286"></a>
-<span class="sourceLineNo">287</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  public static final String HBASE_CLIENT_OPERATION_TIMEOUT = "hbase.client.operation.timeout";<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  public static final String HBASE_CLIENT_META_OPERATION_TIMEOUT =<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    "hbase.client.meta.operation.timeout";<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  /** Default HBase client operation timeout, which is tantamount to a blocking call */<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  public static final int DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT = 1200000;<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /** Used to construct the name of the log directory for a region server */<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  public static final String HREGION_LOGDIR_NAME = "WALs";<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>  /** Used to construct the name of the splitlog directory for a region server */<a name="line.300"></a>
-<span class="sourceLineNo">301</span>  public static final String SPLIT_LOGDIR_NAME = "splitWAL";<a name="line.301"></a>
-<span class="sourceLineNo">302</span><a name="line.302"></a>
-<span class="sourceLineNo">303</span>  /** Like the previous, but for old logs that are about to be deleted */<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  public static final String HREGION_OLDLOGDIR_NAME = "oldWALs";<a name="line.304"></a>
-<span class="sourceLineNo">305</span><a name="line.305"></a>
-<span class="sourceLineNo">306</span>  public static final String CORRUPT_DIR_NAME = "corrupt";<a name="line.306"></a>
+<span class="sourceLineNo">206</span>  public static final String DEFAULT_ZOOKEEPER_ZNODE_PARENT = "/hbase";<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>  /**<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   * Parameter name for the limit on concurrent client-side zookeeper<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   * connections<a name="line.210"></a>
+<span class="sourceLineNo">211</span>   */<a name="line.211"></a>
+<span class="sourceLineNo">212</span>  public static final String ZOOKEEPER_MAX_CLIENT_CNXNS =<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      ZK_CFG_PROPERTY_PREFIX + "maxClientCnxns";<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>  /** Parameter name for the ZK data directory */<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final String ZOOKEEPER_DATA_DIR =<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      ZK_CFG_PROPERTY_PREFIX + "dataDir";<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>  /** Parameter name for the ZK tick time */<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  public static final String ZOOKEEPER_TICK_TIME =<a name="line.220"></a>
+<span class="sourceLineNo">221</span>      ZK_CFG_PROPERTY_PREFIX + "tickTime";<a name="line.221"></a>
+<span class="sourceLineNo">222</span><a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /** Default limit on concurrent client-side zookeeper connections */<a name="line.223"></a>
+<span class="sourceLineNo">224</span>  public static final int DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS = 300;<a name="line.224"></a>
+<span class="sourceLineNo">225</span><a name="line.225"></a>
+<span class="sourceLineNo">226</span>  /** Configuration key for ZooKeeper session timeout */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  public static final String ZK_SESSION_TIMEOUT = "zookeeper.session.timeout";<a name="line.227"></a>
+<span class="sourceLineNo">228</span><a name="line.228"></a>
+<span class="sourceLineNo">229</span>  /** Default value for ZooKeeper session timeout */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  public static final int DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;<a name="line.230"></a>
+<span class="sourceLineNo">231</span><a name="line.231"></a>
+<span class="sourceLineNo">232</span>  /** Configuration key for whether to use ZK.multi */<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  public static final String ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";<a name="line.233"></a>
+<span class="sourceLineNo">234</span><a name="line.234"></a>
+<span class="sourceLineNo">235</span>  /** Parameter name for port region server listens on. */<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  public static final String REGIONSERVER_PORT = "hbase.regionserver.port";<a name="line.236"></a>
+<span class="sourceLineNo">237</span><a name="line.237"></a>
+<span class="sourceLineNo">238</span>  /** Default port region server listens on. */<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  public static final int DEFAULT_REGIONSERVER_PORT = 16020;<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /** default port for region server web api */<a name="line.241"></a>
+<span class="sourceLineNo">242</span>  public static final int DEFAULT_REGIONSERVER_INFOPORT = 16030;<a name="line.242"></a>
+<span class="sourceLineNo">243</span><a name="line.243"></a>
+<span class="sourceLineNo">244</span>  /** A configuration key for regionserver info port */<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  public static final String REGIONSERVER_INFO_PORT =<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    "hbase.regionserver.info.port";<a name="line.246"></a>
+<span class="sourceLineNo">247</span><a name="line.247"></a>
+<span class="sourceLineNo">248</span>  /** A flag that enables automatic selection of regionserver info port */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public static final String REGIONSERVER_INFO_PORT_AUTO =<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      REGIONSERVER_INFO_PORT + ".auto";<a name="line.250"></a>
+<span class="sourceLineNo">251</span><a name="line.251"></a>
+<span class="sourceLineNo">252</span>  /** Parameter name for what region server implementation to use. */<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  public static final String REGION_SERVER_IMPL= "hbase.regionserver.impl";<a name="line.253"></a>
+<span class="sourceLineNo">254</span><a name="line.254"></a>
+<span class="sourceLineNo">255</span>  /** Parameter name for what master implementation to use. */<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public static final String MASTER_IMPL= "hbase.master.impl";<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  /** Parameter name for what hbase client implementation to use. */<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  public static final String HBASECLIENT_IMPL= "hbase.hbaseclient.impl";<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  /** Parameter name for how often threads should wake up */<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  public static final String THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>  /** Default value for thread wake frequency */<a name="line.264"></a>
+<span class="sourceLineNo">265</span>  public static final int DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.267"></a>
+<span class="sourceLineNo">268</span>  public static final String VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  public static final int DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>  /** Parameter name for how often a region should should perform a major compaction */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  public static final String MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /** Parameter name for the maximum batch of KVs to be used in flushes and compactions */<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  public static final String COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max";<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  public static final int COMPACTION_KV_MAX_DEFAULT = 10;<a name="line.278"></a>
+<span class="sourceLineNo">279</span><a name="line.279"></a>
+<span class="sourceLineNo">280</span>  /** Parameter name for HBase instance root directory */<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  public static final String HBASE_DIR = "hbase.rootdir";<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>  /** Parameter name for HBase client IPC pool type */<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  public static final String HBASE_CLIENT_IPC_POOL_TYPE = "hbase.client.ipc.pool.type";<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  /** Parameter name for HBase client IPC pool size */<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  public static final String HBASE_CLIENT_IPC_POOL_SIZE = "hbase.client.ipc.pool.size";<a name="line.287"></a>
+<span class="sourceLineNo">288</span><a name="line.288"></a>
+<span class="sourceLineNo">289</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public static final String HBASE_CLIENT_OPERATION_TIMEOUT = "hbase.client.operation.timeout";<a name="line.290"></a>
+<span class="sourceLineNo">291</span><a name="line.291"></a>
+<span class="sourceLineNo">292</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  public static final String HBASE_CLIENT_META_OPERATION_TIMEOUT =<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    "hbase.client.meta.operation.timeout";<a name="line.294"></a>
+<span class="sourceLineNo">295</span><a name="line.295"></a>
+<span class="sourceLineNo">296</span>  /** Default HBase client operation timeout, which is tantamount to a blocking call */<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  public static final int DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT = 1200000;<a name="line.297"></a>
+<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">299</span>  /** Used to construct the name of the log directory for a region server */<a name="line.299"></a>
+<span class="sourceLineNo">300</span>  public static final String HREGION_LOGDIR_NAME = "WALs";<a name="line.300"></a>
+<span class="sourceLineNo">301</span><a name="line.301"></a>
+<span class="sourceLineNo">302</span>  /** Used to construct the name of the splitlog directory for a region server */<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  public static final String SPLIT_LOGDIR_NAME = "splitWAL";<a name="line.303"></a>
+<span class="sourceLineNo">304</span><a name="line.304"></a>
+<span class="sourceLineNo">305</span>  /** Like the previous, but for old logs that are about to be deleted */<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  public static final String HREGION_OLDLOGDIR_NAME = "oldWALs";<a name="line.306"></a>
 <span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  /** Used by HBCK to sideline backup data */<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  public static final String HBCK_SIDELINEDIR_NAME = ".hbck";<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>  /** Any artifacts left from migration can be moved here */<a name="line.311"></a>
-<span class="sourceLineNo">312</span>  public static final String MIGRATION_NAME = ".migration";<a name="line.312"></a>
-<span class="sourceLineNo">313</span><a name="line.313"></a>
-<span class="sourceLineNo">314</span>  /**<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * The directory from which co-processor/custom filter jars can be loaded<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * dynamically by the region servers. This value can be overridden by the<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   * hbase.dynamic.jars.dir config.<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  public static final String LIB_DIR = "lib";<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  /** Used to construct the name of the compaction directory during compaction */<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  public static final String HREGION_COMPACTIONDIR_NAME = "compaction.dir";<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /** Conf key for the max file size after which we split the region */<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  public static final String HREGION_MAX_FILESIZE =<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      "hbase.hregion.max.filesize";<a name="line.326"></a>
-<span class="sourceLineNo">327</span><a name="line.327"></a>
-<span class="sourceLineNo">328</span>  /** Default maximum file size */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  public static final long DEFAULT_MAX_FILE_SIZE = 10 * 1024 * 1024 * 1024L;<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  /**<a name="line.331"></a>
-<span class="sourceLineNo">332</span>   * Max size of single row for Get's or Scan's without in-row scanning flag set.<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   */<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  public static final String TABLE_MAX_ROWSIZE_KEY = "hbase.table.max.rowsize";<a name="line.334"></a>
-<span class="sourceLineNo">335</span><a name="line.335"></a>
-<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * Default max row size (1 Gb).<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   */<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  public static final long TABLE_MAX_ROWSIZE_DEFAULT = 1024 * 1024 * 1024L;<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * The max number of threads used for opening and closing stores or store<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * files in parallel<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  public static final String HSTORE_OPEN_AND_CLOSE_THREADS_MAX =<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    "hbase.hstore.open.and.close.threads.max";<a name="line.346"></a>
-<span class="sourceLineNo">347</span><a name="line.347"></a>
-<span class="sourceLineNo">348</span>  /**<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   * The default number for the max number of threads used for opening and<a name="line.349"></a>
-<span class="sourceLineNo">350</span>   * closing stores or store files in parallel<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   */<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  public static final int DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX = 1;<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>  /**<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * Block updates if memstore has hbase.hregion.memstore.block.multiplier<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   * times hbase.hregion.memstore.flush.size bytes.  Useful preventing<a name="line.356"></a>
-<span class="sourceLineNo">357</span>   * runaway memstore during spikes in update traffic.<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   */<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  public static final String HREGION_MEMSTORE_BLOCK_MULTIPLIER =<a name="line.359"></a>
-<span class="sourceLineNo">360</span>          "hbase.hregion.memstore.block.multiplier";<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * Default value for hbase.hregion.memstore.block.multiplier<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  public static final int DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER = 4;<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  /** Conf key for the memstore size at which we flush the memstore */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public static final String HREGION_MEMSTORE_FLUSH_SIZE =<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      "hbase.hregion.memstore.flush.size";<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>  public static final String HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      "hbase.hregion.edits.replay.skip.errors";<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  public static final boolean DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      false;<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  /** Maximum value length, enforced on KeyValue construction */<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public static final int MAXIMUM_VALUE_LENGTH = Integer.MAX_VALUE - 1;<a name="line.378"></a>
-<span class="sourceLineNo">379</span><a name="line.379"></a>
-<span class="sourceLineNo">380</span>  /** name of the file for unique cluster ID */<a name="line.380"></a>
-<span class="sourceLineNo">381</span>  public static final String CLUSTER_ID_FILE_NAME = "hbase.id";<a name="line.381"></a>
-<span class="sourceLineNo">382</span><a name="line.382"></a>
-<span class="sourceLineNo">383</span>  /** Default value for cluster ID */<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  public static final String CLUSTER_ID_DEFAULT = "default-cluster";<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /** Parameter name for # days to keep MVCC values during a major compaction */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  public static final String KEEP_SEQID_PERIOD = "hbase.hstore.compaction.keep.seqId.period";<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  /** At least to keep MVCC values in hfiles for 5 days */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static final int MIN_KEEP_SEQID_PERIOD = 5;<a name="line.389"></a>
-<span class="sourceLineNo">390</span><a name="line.390"></a>
-<span class="sourceLineNo">391</span>  // Always store the location of the root table's HRegion.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  // This HRegion is never split.<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>  // region name = table + startkey + regionid. This is the row key.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>  // each row in the root and meta tables describes exactly 1 region<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  // Do we ever need to know all the information that we are storing?<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  // Note that the name of the root table starts with "-" and the name of the<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  // meta table starts with "." Why? it's a trick. It turns out that when we<a name="line.399"></a>
-<span class="sourceLineNo">400</span>  // store region names in memory, we use a SortedMap. Since "-" sorts before<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  // "." (and since no other table name can start with either of these<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  // characters, the root region will always be the first entry in such a Map,<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  // followed by all the meta regions (which will be ordered by their starting<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  // row key as well), followed by all user tables. So when the Master is<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  // choosing regions to assign, it will always choose the root region first,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  // followed by the meta regions, followed by user regions. Since the root<a name="line.406"></a>
-<span class="sourceLineNo">407</span>  // and meta regions always need to be on-line, this ensures that they will<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  // be the first to be reassigned if the server(s) they are being served by<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  // should go down.<a name="line.409"></a>
-<span class="sourceLineNo">410</span><a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>  /**<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * The hbase:meta table's name.<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @deprecated For upgrades of 0.94 to 0.96<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  @Deprecated  // for compat from 0.94 -&gt; 0.96.<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  public static final String BASE_NAMESPACE_DIR = "data";<a name="line.419"></a>
+<span class="sourceLineNo">308</span>  public static final String CORRUPT_DIR_NAME = "corrupt";<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  /** Used by HBCK to sideline backup data */<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  public static final String HBCK_SIDELINEDIR_NAME = ".hbck";<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /** Any artifacts left from migration can be moved here */<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  public static final String MIGRATION_NAME = ".migration";<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>  /**<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   * The directory from which co-processor/custom filter jars can be loaded<a name="line.317"></a>
+<span class="sourceLineNo">318</span>   * dynamically by the region servers. This value can be overridden by the<a name="line.318"></a>
+<span class="sourceLineNo">319</span>   * hbase.dynamic.jars.dir config.<a name="line.319"></a>
+<span class="sourceLineNo">320</span>   */<a name="line.320"></a>
+<span class="sourceLineNo">321</span>  public static final String LIB_DIR = "lib";<a name="line.321"></a>
+<span class="sourceLineNo">322</span><a name="line.322"></a>
+<span class="sourceLineNo">323</span>  /** Used to construct the name of the compaction directory during compaction */<a name="line.323"></a>
+<span class="sourceLineNo">324</span>  public static final String HREGION_COMPACTIONDIR_NAME = "compaction.dir";<a name="line.324"></a>
+<span class="sourceLineNo">325</span><a name="line.325"></a>
+<span class="sourceLineNo">326</span>  /** Conf key for the max file size after which we split the region */<a name="line.326"></a>
+<span class="sourceLineNo">327</span>  public static final String HREGION_MAX_FILESIZE =<a name="line.327"></a>
+<span class="sourceLineNo">328</span>      "hbase.hregion.max.filesize";<a name="line.328"></a>
+<span class="sourceLineNo">329</span><a name="line.329"></a>
+<span class="sourceLineNo">330</span>  /** Default maximum file size */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  public static final long DEFAULT_MAX_FILE_SIZE = 10 * 1024 * 1024 * 1024L;<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * Max size of single row for Get's or Scan's without in-row scanning flag set.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  public static final String TABLE_MAX_ROWSIZE_KEY = "hbase.table.max.rowsize";<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  /**<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * Default max row size (1 Gb).<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   */<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  public static final long TABLE_MAX_ROWSIZE_DEFAULT = 1024 * 1024 * 1024L;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  /**<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * The max number of threads used for opening and closing stores or store<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * files in parallel<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  public static final String HSTORE_OPEN_AND_CLOSE_THREADS_MAX =<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    "hbase.hstore.open.and.close.threads.max";<a name="line.348"></a>
+<span class="sourceLineNo">349</span><a name="line.349"></a>
+<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   * The default number for the max number of threads used for opening and<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * closing stores or store files in parallel<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   */<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  public static final int DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX = 1;<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>  /**<a name="line.356"></a>
+<span class="sourceLineNo">357</span>   * Block updates if memstore has hbase.hregion.memstore.block.multiplier<a name="line.357"></a>
+<span class="sourceLineNo">358</span>   * times hbase.hregion.memstore.flush.size bytes.  Useful preventing<a name="line.358"></a>
+<span class="sourceLineNo">359</span>   * runaway memstore during spikes in update traffic.<a name="line.359"></a>
+<span class="sourceLineNo">360</span>   */<a name="line.360"></a>
+<span class="sourceLineNo">361</span>  public static final String HREGION_MEMSTORE_BLOCK_MULTIPLIER =<a name="line.361"></a>
+<span class="sourceLineNo">362</span>          "hbase.hregion.memstore.block.multiplier";<a name="line.362"></a>
+<span class="sourceLineNo">363</span><a name="line.363"></a>
+<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * Default value for hbase.hregion.memstore.block.multiplier<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
+<span class="sourceLineNo">367</span>  public static final int DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER = 4;<a name="line.367"></a>
+<span class="sourceLineNo">368</span><a name="line.368"></a>
+<span class="sourceLineNo">369</span>  /** Conf key for the memstore size at which we flush the memstore */<a name="line.369"></a>
+<span class="sourceLineNo">370</span>  public static final String HREGION_MEMSTORE_FLUSH_SIZE =<a name="line.370"></a>
+<span class="sourceLineNo">371</span>      "hbase.hregion.memstore.flush.size";<a name="line.371"></a>
+<span class="sourceLineNo">372</span><a name="line.372"></a>
+<span class="sourceLineNo">373</span>  public static final String HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      "hbase.hregion.edits.replay.skip.errors";<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  public static final boolean DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      false;<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>  /** Maximum value length, enforced on KeyValue construction */<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  public static final int MAXIMUM_VALUE_LENGTH = Integer.MAX_VALUE - 1;<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /** name of the file for unique cluster ID */<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  public static final String CLUSTER_ID_FILE_NAME = "hbase.id";<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /** Default value for cluster ID */<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  public static final String CLUSTER_ID_DEFAULT = "default-cluster";<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /** Parameter name for # days to keep MVCC values during a major compaction */<a name="line.388"></a>
+<span class="sourceLineNo">389</span>  public static final String KEEP_SEQID_PERIOD = "hbase.hstore.compaction.keep.seqId.period";<a name="line.389"></a>
+<span class="sourceLineNo">390</span>  /** At least to keep MVCC values in hfiles for 5 days */<a name="line.390"></a>
+<span class="sourceLineNo">391</span>  public static final int MIN_KEEP_SEQID_PERIOD = 5;<a name="line.391"></a>
+<span class="sourceLineNo">392</span><a name="line.392"></a>
+<span class="sourceLineNo">393</span>  // Always store the location of the root table's HRegion.<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  // This HRegion is never split.<a name="line.394"></a>
+<span class="sourceLineNo">395</span><a name="line.395"></a>
+<span class="sourceLineNo">396</span>  // region name = table + startkey + regionid. This is the row key.<a name="line.396"></a>
+<span class="sourceLineNo">397</span>  // each row in the root and meta tables describes exactly 1 region<a name="line.397"></a>
+<span class="sourceLineNo">398</span>  // Do we ever need to know all the information that we are storing?<a name="line.398"></a>
+<span class="sourceLineNo">399</span><a name="line.399"></a>
+<span class="sourceLineNo">400</span>  // Note that the name of the root table starts with "-" and the name of the<a name="line.400"></a>
+<span class="sourceLineNo">401</span>  // meta table starts with "." Why? it's a trick. It turns out that when we<a name="line.401"></a>
+<span class="sourceLineNo">402</span>  // store region names in memory, we use a SortedMap. Since "-" sorts before<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  // "." (and since no other table name can start with either of these<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  // characters, the root region will always be the first entry in such a Map,<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  // followed by all the meta regions (which will be ordered by their starting<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  // row key as well), followed by all user tables. So when the Master is<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  // choosing regions to assign, it will always choose the root region first,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  // followed by the meta regions, followed by user regions. Since the root<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  // and meta regions always need to be on-line, this ensures that they will<a name="line.409"></a>
+<span class="sourceLineNo">410</span>  // be the first to be reassigned if the server(s) they are being served by<a name="line.410"></a>
+<span class="sourceLineNo">411</span>  // should go down.<a name="line.411"></a>
+<span class="sourceLineNo">412</span><a name="line.412"></a>
+<span class="sourceLineNo">413</span><a name="line.413"></a>
+<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * The hbase:meta table's name.<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   * @deprecated For upgrades of 0.94 to 0.96<a name="line.416"></a>
+<span class="sourceLineNo">417</span>   */<a name="line.417"></a>
+<span class="sourceLineNo">418</span>  @Deprecated  // for compat from 0.94 -&gt; 0.96.<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();<a name="line.419"></a>
 <span class="sourceLineNo">420</span><a name="line.420"></a>
-<span class="sourceLineNo">421</span>  /** delimiter used between portions of a region name */<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  public static final int META_ROW_DELIMITER = ',';<a name="line.422"></a>
-<span class="sourceLineNo">423</span><a name="line.423"></a>
-<span class="sourceLineNo">424</span>  /** The catalog family as a string*/<a name="line.424"></a>
-<span class="sourceLineNo">425</span>  public static final String CATALOG_FAMILY_STR = "info";<a name="line.425"></a>
-<span class="sourceLineNo">426</span><a name="line.426"></a>
-<span class="sourceLineNo">427</span>  /** The catalog family */<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  public static final byte [] CATALOG_FAMILY = Bytes.toBytes(CATALOG_FAMILY_STR);<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /** The RegionInfo qualifier as a string */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>  public static final String REGIONINFO_QUALIFIER_STR = "regioninfo";<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>  /** The regioninfo column qualifier */<a name="line.433"></a>
-<span class="sourceLineNo">434</span>  public static final byte [] REGIONINFO_QUALIFIER = Bytes.toBytes(REGIONINFO_QUALIFIER_STR);<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>  /** The server column qualifier */<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  public static final String SERVER_QUALIFIER_STR = "server";<a name="line.437"></a>
+<span class="sourceLineNo">421</span>  public static final String BASE_NAMESPACE_DIR = "data";<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  /** delimiter used between portions of a region name */<a name="line.423"></a>
+<span class="sourceLineNo">424</span>  public static final int META_ROW_DELIMITER = ',';<a name="line.424"></a>
+<span class="sourceLineNo">425</span><a name="line.425"></a>
+<span class="sourceLineNo">426</span>  /** The catalog family as a string*/<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  public static final String CATALOG_FAMILY_STR = "info";<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  /** The catalog family */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>  public static final byte [] CATALOG_FAMILY = Bytes.toBytes(CATALOG_FAMILY_STR);<a name="line.430"></a>
+<span class="sourceLineNo">431</span><a name="line.431"></a>
+<span class="sourceLineNo">432</span>  /** The RegionInfo qualifier as a string */<a name="line.432"></a>
+<span class="sourceLineNo">433</span>  public static final String REGIONINFO_QUALIFIER_STR = "regioninfo";<a name="line.433"></a>
+<span class="sourceLineNo">434</span><a name="line.434"></a>
+<span class="sourceLineNo">435</span>  /** The regioninfo column qualifier */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  public static final byte [] REGIONINFO_QUALIFIER = Bytes.toBytes(REGIONINFO_QUALIFIER_STR);<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
 <span class="sourceLineNo">438</span>  /** The server column qualifier */<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  public static final byte [] SERVER_QUALIFIER = Bytes.toBytes(SERVER_QUALIFIER_STR);<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /** The startcode column qualifier */<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  public static final String STARTCODE_QUALIFIER_STR = "serverstartcode";<a name="line.442"></a>
+<span class="sourceLineNo">439</span>  public static final String SERVER_QUALIFIER_STR = "server";<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  /** The server column qualifier */<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  public static final byte [] SERVER_QUALIFIER = Bytes.toBytes(SERVER_QUALIFIER_STR);<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
 <span class="sourceLineNo">443</span>  /** The startcode column qualifier */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  public static final byte [] STARTCODE_QUALIFIER = Bytes.toBytes(STARTCODE_QUALIFIER_STR);<a name="line.444"></a>
-<span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  /** The open seqnum column qualifier */<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  public static final String SEQNUM_QUALIFIER_STR = "seqnumDuringOpen";<a name="line.447"></a>
+<span class="sourceLineNo">444</span>  public static final String STARTCODE_QUALIFIER_STR = "serverstartcode";<a name="line.444"></a>
+<span class="sourceLineNo">445</span>  /** The startcode column qualifier */<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  public static final byte [] STARTCODE_QUALIFIER = Bytes.toBytes(STARTCODE_QUALIFIER_STR);<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
 <span class="sourceLineNo">448</span>  /** The open seqnum column qualifier */<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  public static final byte [] SEQNUM_QUALIFIER = Bytes.toBytes(SEQNUM_QUALIFIER_STR);<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>  /** The state column qualifier */<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  public static final String STATE_QUALIFIER_STR = "state";<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static final byte [] STATE_QUALIFIER = Bytes.toBytes(STATE_QUALIFIER_STR);<a name="line.454"></a>
+<span class="sourceLineNo">449</span>  public static final String SEQNUM_QUALIFIER_STR = "seqnumDuringOpen";<a name="line.449"></a>
+<span class="sourceLineNo">450</span>  /** The open seqnum column qualifier */<a name="line.450"></a>
+<span class="sourceLineNo">451</span>  public static final byte [] SEQNUM_QUALIFIER = Bytes.toBytes(SEQNUM_QUALIFIER_STR);<a name="line.451"></a>
+<span class="sourceLineNo">452</span><a name="line.452"></a>
+<span class="sourceLineNo">453</span>  /** The state column qualifier */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  public static final String STATE_QUALIFIER_STR = "state";<a name="line.454"></a>
 <span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
-<span class="sourceLineNo">457</span>   * The serverName column qualifier. Its the server where the region is<a name="line.457"></a>
-<span class="sourceLineNo">458</span>   * transitioning on, while column server is the server where the region is<a name="line.458"></a>
-<span class="sourceLineNo">459</span>   * opened on. They are the same when the region is in state OPEN.<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   */<a name="line.460"></a>
-<span class="sourceLineNo">461</span>  public static final String SERVERNAME_QUALIFIER_STR = "sn";<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>  public static final byte [] SERVERNAME_QUALIFIER = Bytes.toBytes(SERVERNAME_QUALIFIER_STR);<a name="line.463"></a>
+<span class="sourceLineNo">456</span>  public static final byte [] STATE_QUALIFIER = Bytes.toBytes(STATE_QUALIFIER_STR);<a name="line.456"></a>
+<span class="sourceLineNo">457</span><a name="line.457"></a>
+<span class="sourceLineNo">458</span>  /**<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   * The serverName column qualifier. Its the server where the region is<a name="line.459"></a>
+<span class="sourceLineNo">460</span>   * transitioning on, while column server is the server where the region is<a name="line.460"></a>
+<span class="sourceLineNo">461</span>   * opened on. They are the same when the region is in state OPEN.<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
+<span class="sourceLineNo">463</span>  public static final String SERVERNAME_QUALIFIER_STR = "sn";<a name="line.463"></a>
 <span class="sourceLineNo">464</span><a name="line.464"></a>
-<span class="sourceLineNo">465</span>  /** The lower-half split region column qualifier */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>  public static final byte [] SPLITA_QUALIFIER = Bytes.toBytes("splitA");<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>  /** The upper-half split region column qualifier */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static final byte [] SPLITB_QUALIFIER = Bytes.toBytes("splitB");<a name="line.469"></a>
-<span class="sourceLineNo">470</span><a name="line.470"></a>
-<span class="sourceLineNo">471</span>  /** The lower-half merge region column qualifier */<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  public static final byte[] MERGEA_QUALIFIER = Bytes.toBytes("mergeA");<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /** The upper-half merge region column qualifier */<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  public static final byte[] MERGEB_QUALIFIER = Bytes.toBytes("mergeB");<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  /** The catalog family as a string*/<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  public static final String TABLE_FAMILY_STR = "table";<a name="line.478"></a>
-<span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>  /** The catalog family */<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  public static final byte [] TABLE_FAMILY = Bytes.toBytes(TABLE_FAMILY_STR);<a name="line.481"></a>
-<span class="sourceLineNo">482</span><a name="line.482"></a>
-<span class="sourceLineNo">483</span>  /** The serialized table state qualifier */<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public static final byte[] TABLE_STATE_QUALIFIER = Bytes.toBytes("state");<a name="line.484"></a>
-<span class="sourceLineNo">485</span><a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>  /**<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * The meta table version column qualifier.<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * We keep current version of the meta table in this column in &lt;code&gt;-ROOT-&lt;/code&gt;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * table: i.e. in the 'info:v' column.<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   */<a name="line.491"></a>
-<span class="sourceLineNo">492</span>  public static final byte [] META_VERSION_QUALIFIER = Bytes.toBytes("v");<a name="line.492"></a>
-<span class="sourceLineNo">493</span><a name="line.493"></a>
-<span class="sourceLineNo">494</span>  /**<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * The current version of the meta table.<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * - pre-hbase 0.92.  There is no META_VERSION column in the root table<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * in this case. The meta has HTableDescriptor serialized into the HRegionInfo;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * - version 0 is 0.92 and 0.94. Meta data has serialized HRegionInfo's using<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * Writable serialization, and HRegionInfo's does not contain HTableDescriptors.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * - version 1 for 0.96+ keeps HRegionInfo data structures, but changes the<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   * byte[] serialization from Writables to Protobuf.<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * See HRegionInfo.VERSION<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public static final short META_VERSION = 1;<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  // Other constants<a name="line.506"></a>
+<span class="sourceLineNo">465</span>  public static final byte [] SERVERNAME_QUALIFIER = Bytes.toBytes(SERVERNAME_QUALIFIER_STR);<a name="line.465"></a>
+<span class="sourceLineNo">466</span><a name="line.466"></a>
+<span class="sourceLineNo">467</span>  /** The lower-half split region column qualifier */<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  public static final byte [] SPLITA_QUALIFIER = Bytes.toBytes("splitA");<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /** The upper-half split region column qualifier */<a name="line.470"></a>
+<span class="sourceLineNo">471</span>  public static final byte [] SPLITB_QUALIFIER = Bytes.toBytes("splitB");<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  /** The lower-half merge region column qualifier */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  public static final byte[] MERGEA_QUALIFIER = Bytes.toBytes("mergeA");<a name="line.474"></a>
+<span class="sourceLineNo">475</span><a name="line.475"></a>
+<span class="sourceLineNo">476</span>  /** The upper-half merge region column qualifier */<a name="line.476"></a>
+<span class="sourceLineNo">477</span>  public static final byte[] MERGEB_QUALIFIER = Bytes.toBytes("mergeB");<a name="line.477"></a>
+<span class="sourceLineNo">478</span><a name="line.478"></a>
+<span class="sourceLineNo">479</span>  /** The catalog family as a string*/<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  public static final String TABLE_FAMILY_STR = "table";<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>  /** The catalog family */<a name="line.482"></a>
+<span class="sourceLineNo">483</span>  public static final byte [] TABLE_FAMILY = Bytes.toBytes(TABLE_FAMILY_STR);<a name="line.483"></a>
+<span class="sourceLineNo">484</span><a name="line.484"></a>
+<span class="sourceLineNo">485</span>  /** The serialized table state qualifier */<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  public static final byte[] TABLE_STATE_QUALIFIER = Bytes.toBytes("state");<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span><a name="line.488"></a>
+<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   * The meta table version column qualifier.<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * We keep current version of the meta table in this column in &lt;code&gt;-ROOT-&lt;/code&gt;<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * table: i.e. in the 'info:v' column.<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static final byte [] META_VERSION_QUALIFIER = Bytes.toBytes("v");<a name="line.494"></a>
+<span class="sourceLineNo">495</span><a name="line.495"></a>
+<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
+<span class="sourceLineNo">497</span>   * The current version of the meta table.<a name="line.497"></a>
+<span class="sourceLineNo">498</span>   * - pre-hbase 0.92.  There is no META_VERSION column in the root table<a name="line.498"></a>
+<span class="sourceLineNo">499</span>   * in this case. The meta has HTableDescriptor serialized into the HRegionInfo;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>   * - version 0 is 0.92 and 0.94. Meta data has serialized HRegionInfo's using<a name="line.500"></a>
+<span class="sourceLineNo">501</span>   * Writable serialization, and HRegionInfo's does not contain HTableDescriptors.<a name="line.501"></a>
+<span class="sourceLineNo">502</span>   * - version 1 for 0.96+ keeps HRegionInfo data structures, but changes the<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   * byte[] serialization from Writables to Protobuf.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * See HRegionInfo.VERSION<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   */<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  public static final short META_VERSION = 1;<a name="line.506"></a>
 <span class=

<TRUNCATED>
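
For illustration of the catalog-table qualifiers touched in the hunk above (sn, state, splitA/splitB, mergeA/mergeB, the table:state qualifier and the info:v meta version), here is a minimal sketch of reading the server-name and state columns for one region row out of hbase:meta with the standard client API. The "info" family name, the class name and the row-key argument are assumptions made for the example only; they are not part of this patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class MetaRowPeek {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    byte[] info = Bytes.toBytes("info");        // catalog family holding the qualifiers above (assumption)
    byte[] regionRow = Bytes.toBytes(args[0]);  // a region row key in hbase:meta, supplied by the caller
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table meta = conn.getTable(TableName.META_TABLE_NAME)) {
      Get get = new Get(regionRow);
      get.addColumn(info, Bytes.toBytes("sn"));     // SERVERNAME_QUALIFIER: server the region is transitioning on
      get.addColumn(info, Bytes.toBytes("state"));  // STATE_QUALIFIER: current region state
      Result r = meta.get(get);
      byte[] sn = r.getValue(info, Bytes.toBytes("sn"));
      byte[] state = r.getValue(info, Bytes.toBytes("state"));
      System.out.println("sn    = " + (sn == null ? "<absent>" : Bytes.toString(sn)));
      System.out.println("state = " + (state == null ? "<absent>" : Bytes.toString(state)));
    }
  }
}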

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.html
index 8fd15a0..da22771 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.html
@@ -186,741 +186,742 @@
 <span class="sourceLineNo">178</span>   * The number of bytes per checksum.<a name="line.178"></a>
 <span class="sourceLineNo">179</span>   */<a name="line.179"></a>
 <span class="sourceLineNo">180</span>  public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  // For measuring number of checksum failures<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  static final Counter checksumFailures = new Counter();<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  // for test purpose<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  public static final Counter dataBlockReadCnt = new Counter();<a name="line.185"></a>
-<span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /**<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * Number of checksum verification failures. It also<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * clears the counter.<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  public static final long getChecksumFailuresCount() {<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    long count = checksumFailures.get();<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    checksumFailures.set(0);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    return count;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /** API required to write an {@link HFile} */<a name="line.197"></a>
-<span class="sourceLineNo">198</span>  public interface Writer extends Closeable {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>    /** Add an element to the file info map. */<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>    void append(Cell cell) throws IOException;<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>    /** @return the path to this {@link HFile} */<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    Path getPath();<a name="line.208"></a>
-<span class="sourceLineNo">209</span><a name="line.209"></a>
-<span class="sourceLineNo">210</span>    /**<a name="line.210"></a>
-<span class="sourceLineNo">211</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.211"></a>
-<span class="sourceLineNo">212</span>     * a compound Bloom filter writer.<a name="line.212"></a>
-<span class="sourceLineNo">213</span>     */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    /**<a name="line.223"></a>
-<span class="sourceLineNo">224</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.224"></a>
-<span class="sourceLineNo">225</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.225"></a>
-<span class="sourceLineNo">226</span>     * in HFile version 1 and version 2.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>     */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>    /**<a name="line.230"></a>
-<span class="sourceLineNo">231</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.231"></a>
-<span class="sourceLineNo">232</span>     * HFile V2.<a name="line.232"></a>
-<span class="sourceLineNo">233</span>     */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>    /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>     * Return the file context for the HFile this writer belongs to<a name="line.237"></a>
-<span class="sourceLineNo">238</span>     */<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    HFileContext getFileContext();<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * we want to be able to swap writer implementations.<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  public static class WriterFactory {<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    protected final Configuration conf;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    protected final CacheConfig cacheConf;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    protected FileSystem fs;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    protected Path path;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    protected FSDataOutputStream ostream;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    protected CellComparator comparator = <a name="line.252"></a>
-<span class="sourceLineNo">253</span>        CellComparator.COMPARATOR;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    protected InetSocketAddress[] favoredNodes;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    private HFileContext fileContext;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    protected boolean shouldDropBehind = false;<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      this.conf = conf;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      this.cacheConf = cacheConf;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span><a name="line.262"></a>
-<span class="sourceLineNo">263</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      Preconditions.checkNotNull(fs);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(path);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      this.fs = fs;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      this.path = path;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return this;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      Preconditions.checkNotNull(ostream);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      this.ostream = ostream;<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return this;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      Preconditions.checkNotNull(comparator);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      this.comparator = comparator;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      return this;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    }<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      // Deliberately not checking for null here.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      this.favoredNodes = favoredNodes;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return this;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      this.fileContext = fileContext;<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      return this;<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>      return this;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>  // For measuring number of checksum failures<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  static final Counter CHECKSUM_FAILURES = new Counter();<a name="line.183"></a>
+<span class="sourceLineNo">184</span><a name="line.184"></a>
+<span class="sourceLineNo">185</span>  // For tests. Gets incremented when we read a block whether from HDFS or from Cache.<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  public static final Counter DATABLOCK_READ_COUNT = new Counter();<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /**<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * Number of checksum verification failures. It also<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   * clears the counter.<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   */<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public static final long getChecksumFailuresCount() {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    long count = CHECKSUM_FAILURES.get();<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    CHECKSUM_FAILURES.set(0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    return count;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
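
The renamed CHECKSUM_FAILURES counter keeps the read-and-reset contract spelled out in the javadoc above: each call to HFile.getChecksumFailuresCount() returns the failures accumulated since the previous call and zeroes the counter. A small polling sketch follows; the class name and poll interval are illustrative assumptions, and the counter is only meaningful inside a process that is actually reading hfiles.

import org.apache.hadoop.hbase.io.hfile.HFile;

public class ChecksumFailurePoller {
  public static void main(String[] args) throws InterruptedException {
    while (true) {
      // Each call returns only the failures seen since the last call, then resets the counter to zero.
      long failures = HFile.getChecksumFailuresCount();
      if (failures > 0) {
        System.err.println("HFile checksum verification failures in last interval: " + failures);
      }
      Thread.sleep(10_000L);
    }
  }
}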
+<span class="sourceLineNo">198</span>  /** API required to write an {@link HFile} */<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  public interface Writer extends Closeable {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>    /** Add an element to the file info map. */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>    void append(Cell cell) throws IOException;<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>    /** @return the path to this {@link HFile} */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    Path getPath();<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>    /**<a name="line.211"></a>
+<span class="sourceLineNo">212</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.212"></a>
+<span class="sourceLineNo">213</span>     * a compound Bloom filter writer.<a name="line.213"></a>
+<span class="sourceLineNo">214</span>     */<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.215"></a>
+<span class="sourceLineNo">216</span><a name="line.216"></a>
+<span class="sourceLineNo">217</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>    /**<a name="line.224"></a>
+<span class="sourceLineNo">225</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.225"></a>
+<span class="sourceLineNo">226</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.226"></a>
+<span class="sourceLineNo">227</span>     * in HFile version 1 and version 2.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>     */<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    /**<a name="line.231"></a>
+<span class="sourceLineNo">232</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.232"></a>
+<span class="sourceLineNo">233</span>     * HFile V2.<a name="line.233"></a>
+<span class="sourceLineNo">234</span>     */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    /**<a name="line.237"></a>
+<span class="sourceLineNo">238</span>     * Return the file context for the HFile this writer belongs to<a name="line.238"></a>
+<span class="sourceLineNo">239</span>     */<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    HFileContext getFileContext();<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /**<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * we want to be able to swap writer implementations.<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  public static class WriterFactory {<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    protected final Configuration conf;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    protected final CacheConfig cacheConf;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    protected FileSystem fs;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    protected Path path;<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    protected FSDataOutputStream ostream;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    protected CellComparator comparator = <a name="line.253"></a>
+<span class="sourceLineNo">254</span>        CellComparator.COMPARATOR;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    protected InetSocketAddress[] favoredNodes;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    private HFileContext fileContext;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    protected boolean shouldDropBehind = false;<a name="line.257"></a>
+<span class="sourceLineNo">258</span><a name="line.258"></a>
+<span class="sourceLineNo">259</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      this.conf = conf;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      this.cacheConf = cacheConf;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(fs);<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      Preconditions.checkNotNull(path);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      this.fs = fs;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      this.path = path;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      return this;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    }<a name="line.270"></a>
+<span class="sourceLineNo">271</span><a name="line.271"></a>
+<span class="sourceLineNo">272</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      Preconditions.checkNotNull(ostream);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>      this.ostream = ostream;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      return this;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    }<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      Preconditions.checkNotNull(comparator);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      this.comparator = comparator;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      return this;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      // Deliberately not checking for null here.<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      this.favoredNodes = favoredNodes;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      return this;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      this.fileContext = fileContext;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      return this;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      return this;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    }<a name="line.298"></a>
 <span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    public Writer create() throws IOException {<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.302"></a>
-<span class="sourceLineNo">303</span>            "filesystem/path or path");<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      if (path != null) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        try {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        } catch (UnsupportedOperationException uoe) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  }<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
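
Putting the builder methods above together, a writer is obtained from one of the HFile factory methods, pointed at a path and a file context, and then created. A minimal sketch, assuming an HFileContextBuilder-built context and made-up path/row/family/qualifier values; none of these names come from the patch itself.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class WriteOneHFile {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path("/tmp/example.hfile");                 // illustrative output path
    HFileContext ctx = new HFileContextBuilder().withBlockSize(64 * 1024).build();
    HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)   // factory with block cache access disabled
        .withPath(fs, path)
        .withFileContext(ctx)
        .create();
    try {
      // Cells must be appended in sorted order; a single cell trivially satisfies that.
      writer.append(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
          Bytes.toBytes("q"), Bytes.toBytes("value1")));
      writer.appendFileInfo(Bytes.toBytes("EXAMPLE_KEY"), Bytes.toBytes("example"));
    } finally {
      writer.close();
    }
  }
}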
-<span class="sourceLineNo">318</span>  /** The configuration key for HFile version to use for new files */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static int getFormatVersion(Configuration conf) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    checkFormatVersion(version);<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    return version;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  }<a name="line.325"></a>
-<span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  /**<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * Disables block cache access for all writers created through the<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * returned factory.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.332"></a>
-<span class="sourceLineNo">333</span>       conf) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Configuration tempConf = new Configuration(conf);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  }<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  /**<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      CacheConfig cacheConf) {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    int version = getFormatVersion(conf);<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    switch (version) {<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    case 2:<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.347"></a>
-<span class="sourceLineNo">348</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        "in hbase-site.xml)");<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    case 3:<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    default:<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.354"></a>
-<span class="sourceLineNo">355</span>          "format version " + version);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    }<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
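
The two factory getters above encode the format-version gate: getFormatVersion() resolves hfile.format.version (defaulting to the newest supported version), and getWriterFactory() refuses to hand out a v2 writer because this code line writes v3 hfiles only while still reading v2. A hedged sketch of observing that gate; the class name is an assumption.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;

public class FormatVersionCheck {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Unset, this resolves to the maximum supported version (v3 for this code line).
    System.out.println("effective hfile.format.version = " + HFile.getFormatVersion(conf));
    conf.setInt(HFile.FORMAT_VERSION_KEY, 2);
    try {
      HFile.getWriterFactory(conf, new CacheConfig(conf));
    } catch (IllegalArgumentException e) {
      // Writers for v2 are rejected; v2 files can still be read without changing the setting.
      System.out.println("as expected: " + e.getMessage());
    }
  }
}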
-<span class="sourceLineNo">359</span>  /**<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * An abstraction used by the block index.<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  public interface CachingBlockReader {<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    /**<a name="line.365"></a>
-<span class="sourceLineNo">366</span>     * Read in a file block.<a name="line.366"></a>
-<span class="sourceLineNo">367</span>     * @param offset offset to read.<a name="line.367"></a>
-<span class="sourceLineNo">368</span>     * @param onDiskBlockSize size of the block<a name="line.368"></a>
-<span class="sourceLineNo">369</span>     * @param cacheBlock<a name="line.369"></a>
-<span class="sourceLineNo">370</span>     * @param pread<a name="line.370"></a>
-<span class="sourceLineNo">371</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.371"></a>
-<span class="sourceLineNo">372</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.372"></a>
-<span class="sourceLineNo">373</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.373"></a>
-<span class="sourceLineNo">374</span>     *  caching efficiency of encoded data blocks)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.375"></a>
-<span class="sourceLineNo">376</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.376"></a>
-<span class="sourceLineNo">377</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.377"></a>
-<span class="sourceLineNo">378</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>     * @return Block wrapped in a ByteBuffer.<a name="line.379"></a>
-<span class="sourceLineNo">380</span>     * @throws IOException<a name="line.380"></a>
-<span class="sourceLineNo">381</span>     */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.382"></a>
-<span class="sourceLineNo">383</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.385"></a>
-<span class="sourceLineNo">386</span>        throws IOException;<a name="line.386"></a>
-<span class="sourceLineNo">387</span><a name="line.387"></a>
-<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
-<span class="sourceLineNo">389</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.389"></a>
-<span class="sourceLineNo">390</span>     * @param block Block to be returned.<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     */<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    void returnBlock(HFileBlock block);<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  }<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
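
The CachingBlockReader contract described above is check-out/check-in: readBlock() may return a block owned by the block cache, so the caller should hand it back with returnBlock() when done. A sketch of that pairing written against the signatures in this interface; the helper class, method name and the offset/size parameters are assumptions.

import java.io.IOException;

import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileBlock;

public class BlockCheckout {
  /**
   * Reads one data block through the caching reader and always gives it back,
   * mirroring the read/return discipline described in CachingBlockReader.
   */
  static BlockType blockTypeAt(HFile.CachingBlockReader reader, long offset, long onDiskBlockSize)
      throws IOException {
    HFileBlock block = reader.readBlock(offset, onDiskBlockSize,
        true /* cacheBlock */, false /* pread */, false /* isCompaction */,
        true /* updateCacheMetrics */, BlockType.DATA, null /* expectedDataBlockEncoding */);
    try {
      return block.getBlockType();     // use the block only while it is checked out
    } finally {
      reader.returnBlock(block);       // hand it back to the cache when finished
    }
  }
}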
-<span class="sourceLineNo">395</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    /**<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * write.<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     */<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    String getName();<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>    CellComparator getComparator();<a name="line.404"></a>
-<span class="sourceLineNo">405</span><a name="line.405"></a>
-<span class="sourceLineNo">406</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    Cell getLastKey();<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>    Cell midkey() throws IOException;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>    long length();<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>    long getEntries();<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>    Cell getFirstKey();<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    long indexSize();<a name="line.422"></a>
-<span class="sourceLineNo">423</span><a name="line.423"></a>
-<span class="sourceLineNo">424</span>    byte[] getFirstRowKey();<a name="line.424"></a>
-<span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>    byte[] getLastRowKey();<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    FixedFileTrailer getTrailer();<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>    /**<a name="line.436"></a>
-<span class="sourceLineNo">437</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.437"></a>
-<span class="sourceLineNo">438</span>     * {@link HFile} version.<a name="line.438"></a>
-<span class="sourceLineNo">439</span>     * Knows nothing about how that metadata is structured.<a name="line.439"></a>
-<span class="sourceLineNo">440</span>     */<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * {@link HFile}  version.<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * Knows nothing about how that metadata is structured.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     */<a name="line.447"></a>
-<span class="sourceLineNo">448</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.448"></a>
-<span class="sourceLineNo">449</span><a name="line.449"></a>
-<span class="sourceLineNo">450</span>    Path getPath();<a name="line.450"></a>
-<span class="sourceLineNo">451</span><a name="line.451"></a>
-<span class="sourceLineNo">452</span>    /** Close method with optional evictOnClose */<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    void close(boolean evictOnClose) throws IOException;<a name="line.453"></a>
-<span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.455"></a>
-<span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    boolean hasMVCCInfo();<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>     * Return the file context of the HFile this reader belongs to<a name="line.460"></a>
-<span class="sourceLineNo">461</span>     */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    HFileContext getFileContext();<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    <a name="line.463"></a>
-<span class="sourceLineNo">464</span>    boolean isPrimaryReplicaReader();<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    <a name="line.465"></a>
-<span class="sourceLineNo">466</span>    void setPrimaryReplicaReader(boolean isPrimaryReplicaReader);<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    boolean shouldIncludeMemstoreTS();<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>    boolean isDecodeMemstoreTS();<a name="line.470"></a>
-<span class="sourceLineNo">471</span><a name="line.471"></a>
-<span class="sourceLineNo">472</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>    @VisibleForTesting<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    @VisibleForTesting<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    boolean prefetchComplete();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>  }<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
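
The Reader interface above is what the createReader() overloads further down return. A short sketch of opening an existing hfile and inspecting it through a few of these methods; the path argument and class name are illustrative, and the CacheConfig is built from the configuration as the createReader javadoc below asks.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;

public class InspectHFile {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path(args[0]);                       // path to an existing hfile
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
    try {
      reader.loadFileInfo();                             // load the file-info map before use
      System.out.println("name     = " + reader.getName());
      System.out.println("entries  = " + reader.getEntries());
      System.out.println("length   = " + reader.length());
      System.out.println("firstKey = " + reader.getFirstKey());
      System.out.println("lastKey  = " + reader.getLastKey());
    } finally {
      reader.close(false);                               // close without evicting cached blocks
    }
  }
}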
-<span class="sourceLineNo">481</span>  /**<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * Method returns the reader given the specified arguments.<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   *<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * @param path hfile's path<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * @param fsdis stream of path's file<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * @param size max size of the trailer.<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * @param hfs<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * @return an appropriate instance of HFileReader<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   */<a name="line.492"></a>
-<span class="sourceLineNo">493</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      justification="Intentional")<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  private static Reader pickReaderVersion(Path path, FSDataInputStreamWrapper fsdis,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      long size, CacheConfig cacheConf, HFileSystem hfs, Configuration conf) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FixedFileTrailer trailer = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    try {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      switch (trailer.getMajorVersion()) {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      case 2:<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        LOG.debug("Opening HFile v2 with v3 reader");<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      case 3 :<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs, conf);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      default:<a name="line.508"></a>
-<span class="sourceLineNo">509</span>        throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (Throwable t) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      try {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        fsdis.close();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      } catch (Throwable t2) {<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper", t2);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    }<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  /**<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   * @param fs A file system<a name="line.522"></a>
-<span class="sourceLineNo">523</span>   * @param path Path to HFile<a name="line.523"></a>
-<span class="sourceLineNo">524</span>   * @param fsdis a stream of path's file<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * @param size max size of the trailer.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.526"></a>
-<span class="sourceLineNo">527</span>   * @param conf Configuration<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * @return A version specific Hfile Reader<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  @SuppressWarnings("resource")<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  public static Reader createReader(FileSystem fs, Path path,<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      FSDataInputStreamWrapper fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      throws IOException {<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    HFileSystem hfs = null;<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    // the filesystem.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    if (!(fs instanceof HFileSystem)) {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      hfs = new HFileSystem(fs);<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } else {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      hfs = (HFileSystem)fs;<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    return pickReaderVersion(path, fsdis, size, cacheConf, hfs, conf);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   *<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * @param fs filesystem<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * @param path Path to file to read<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * @param cacheConf This must not be null.  @see {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * @return an active Reader instance<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   */<a name="line.556"></a>
-<span class="sourceLineNo">557</span>  public static Reader createReader(<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      FileSystem fs, Path path, CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    return pickReaderVersion(path, stream, fs.getFileStatus(path).getLen(),<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      cacheConf, stream.getHfs(), conf);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>  }<a name="line.563"></a>
-<span class="sourceLineNo">564</span><a name="line.564"></a>
-<span class="sourceLineNo">565</span>  /**<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * This factory method is used only by unit tests<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  static Reader createReaderFromStream(Path path,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      FSDataInputStream fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    return pickReaderVersion(path, wrapper, size, cacheConf, null, conf);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  /**<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   * @param fs filesystem<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   * @param path Path to file to verify<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.579"></a>
-<span class="sourceLineNo">580</span>   * @throws IOException if failed to read from the underlying stream<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   */<a name="line.581"></a>
-<span class="sourceLineNo">582</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>  /**<a name="line.586"></a>
-<span class="sourceLineNo">587</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.587"></a>
-<span class="sourceLineNo">588</span>   * @param fs filesystem<a name="line.588"></a>
-<span class="sourceLineNo">589</span>   * @param fileStatus the file to verify<a name="line.589"></a>
-<span class="sourceLineNo">590</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * @throws IOException if failed to read from the underlying stream<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   */<a name="line.592"></a>
-<span class="sourceLineNo">593</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      throws IOException {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    final Path path = fileStatus.getPath();<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    final long size = fileStatus.getLen();<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    try {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.599"></a>
-<span class="sourceLineNo">600</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      return true;<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    } catch (IllegalArgumentException e) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      return false;<a name="line.604"></a>
-<span class="sourceLineNo">605</span>    } catch (IOException e) {<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      throw e;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } finally {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      try {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        fsdis.close();<a name="line.609"></a>
-<span class="sourceLineNo">610</span>      } catch (Throwable t) {<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper: " + path, t);<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      }<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>  }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>  /**<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;byte [], byte []&gt;(Bytes.BYTES_COMPARATOR);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    public FileInfo() {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>      super();<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    }<a name="line.633"></a>
-<span class="sourceLineNo">634</span><a name="line.634"></a>
-<span class="sourceLineNo">635</span>    /**<a name="line.635"></a>
-<span class="sourceLineNo">636</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.636"></a>
-<span class="sourceLineNo">637</span>     * key prefix.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>     *<a name="line.638"></a>
-<span class="sourceLineNo">639</span>     * @param k key to add<a name="line.639"></a>
-<span class="sourceLineNo">640</span>     * @param v value to add<a name="line.640"></a>
-<span class="sourceLineNo">641</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.641"></a>
-<span class="sourceLineNo">642</span>     *          with the reserved prefix<a name="line.642"></a>
-<span class="sourceLineNo">643</span>     * @return this file info object<a name="line.643"></a>
-<span class="sourceLineNo">644</span>     * @throws IOException if the key or value is invalid<a name="line.644"></a>
-<span class="sourceLineNo">645</span>     */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        final boolean checkPrefix) throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      if (k == null || v == null) {<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        throw new NullPointerException("Key nor value may be null");<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      }<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.651"></a>
-<span class="sourceLineNo">652</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.652"></a>
-<span class="sourceLineNo">653</span>            + " are reserved");<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      put(k, v);<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return this;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span><a name="line.658"></a>
-<span class="sourceLineNo">659</span>    public void clear() {<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      this.map.clear();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span><a name="line.662"></a>
-<span class="sourceLineNo">663</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>      return map.comparator();<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    }<a name="line.665"></a>
-<span class="sourceLineNo">666</span><a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public boolean containsKey(Object key) {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      return map.containsKey(key);<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    }<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    public boolean containsValue(Object value) {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      return map.containsValue(value);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    }<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      return map.entrySet();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    public boolean equals(Object o) {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      return map.equals(o);<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span><a name="line.682"></a>
-<span class="sourceLineNo">683</span>    public byte[] firstKey() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      return map.firstKey();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    }<a name="line.685"></a>
-<span class="sourceLineNo">686</span><a name="line.686"></a>
-<span class="sourceLineNo">687</span>    public byte[] get(Object key) {<a name="line.687"></a>
-<span class="sourceLineNo">688</span>      return map.get(key);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>    }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>    public int hashCode() {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      return map.hashCode();<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span><a name="line.694"></a>
-<span class="sourceLineNo">695</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>      return this.map.headMap(toKey);<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    }<a name="line.697"></a>
-<span class="sourceLineNo">698</span><a name="line.698"></a>
-<span class="sourceLineNo">699</span>    public boolean isEmpty() {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return map.isEmpty();<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      return map.keySet();<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>    public byte[] lastKey() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>      return map.lastKey();<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    }<a name="line.709"></a>
-<span class="sourceLineNo">710</span><a name="line.710"></a>
-<span class="sourceLineNo">711</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      return this.map.put(key, value);<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    }<a name="line.713"></a>
-<span class="sourceLineNo">714</span><a name="line.714"></a>
-<span class="sourceLineNo">715</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.map.putAll(m);<a name="line.716"></a>
-<span class="sourceLineNo">717</span>    }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public byte[] remove(Object key) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return this.map.remove(key);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    public int size() {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>      return map.size();<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    }<a name="line.725"></a>
-<span class="sourceLineNo">726</span><a name="line.726"></a>
-<span class="sourceLineNo">727</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return this.map.subMap(fromKey, toKey);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.731"></a>
-<span class="sourceLineNo">732</span>      return this.map.tailMap(fromKey);<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    }<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>    public Collection&lt;byte[]&gt; values() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      return map.values();<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>    /**<a name="line.739"></a>
-<span class="sourceLineNo">740</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.740"></a>
-<span class="sourceLineNo">741</span>     * We write it as a protobuf.<a name="line.741"></a>
-<span class="sourceLineNo">742</span>     * @param out<a name="line.742"></a>
-<span class="sourceLineNo">743</span>     * @throws IOException<a name="line.743"></a>
-<span class="sourceLineNo">744</span>     * @see #read(DataInputStream)<a name="line.744"></a>
-<span class="sourceLineNo">745</span>     */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    void write(final DataOutputStream out) throws IOException {<a name="line.746"></a>
-<span class="sourceLineNo">747</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.749"></a>
-<span class="sourceLineNo">750</span>        bbpBuilder.setFirst(ByteStringer.wrap(e.getKey()));<a name="line.750"></a>
-<span class="sourceLineNo">751</span>        bbpBuilder.setSecond(ByteStringer.wrap(e.getValue()));<a name="line.751"></a>
-<span class="sourceLineNo">752</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      }<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      builder.build().writeDelimitedTo(out);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    /**<a name="line.758"></a>
-<span class="sourceLineNo">759</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.759"></a>
-<span class="sourceLineNo">760</span>     * Can deserialize protobuf of old Writables format.<a name="line.760"></a>
-<span class="sourceLineNo">761</span>     * @param in<a name="line.761"></a>
-<span class="sourceLineNo">762</span>     * @throws IOException<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * @see #write(DataOutputStream)<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    void read(final DataInputStream in) throws IOException {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      byte [] pbuf = new byte[pblen];<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (in.markSupported()) in.mark(pblen);<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      int read = in.read(pbuf);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      } else {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>        if (in.markSupported()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>          in.reset();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>          parseWritable(in);<a name="line.777"></a>
-<span class="sourceLineNo">778</span>        } else {<a name="line.778"></a>
-<span class="sourceLineNo">779</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.779"></a>
-<span class="sourceLineNo">780</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.781"></a>
-<span class="sourceLineNo">782</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.782"></a>
-<span class="sourceLineNo">783</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.783"></a>
-<span class="sourceLineNo">784</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.784"></a>
-<span class="sourceLineNo">785</span>          parseWritable(new DataInputStream(sis));<a name="line.785"></a>
-<span class="sourceLineNo">786</span>        }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>      }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    }<a name="line.788"></a>
-<span class="sourceLineNo">789</span><a name="line.789"></a>
-<span class="sourceLineNo">790</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.790"></a>
-<span class="sourceLineNo">791</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.791"></a>
-<span class="sourceLineNo">792</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.792"></a>
-<span class="sourceLineNo">793</span>     * @throws IOException<a name="line.793"></a>
-<span class="sourceLineNo">794</span>     */<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      this.map.clear();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>      // Read the number of entries in the map<a name="line.798"></a>
-<span class="sourceLineNo">799</span>      int entries = in.readInt();<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      // Then read each key/value pair<a name="line.800"></a>
-<span class="sourceLineNo">801</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        byte [] key = Bytes.readByteArray(in);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        in.readByte();<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        byte [] value = Bytes.readByteArray(in);<a name="line.805"></a>
-<span class="sourceLineNo">806</span>        this.map.put(key, value);<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      }<a name="line.807"></a>
-<span class="sourceLineNo">808</span>    }<a name="line.808"></a>
-<span class="sourceLineNo">809</span><a name="line.809"></a>
-<span class="sourceLineNo">810</span>    /**<a name="line.810"></a>
-<span class="sourceLineNo">811</span>     * Fill our map with content of the pb we read off disk<a name="line.811"></a>
-<span class="sourceLineNo">812</span>     * @param fip protobuf message to read<a name="line.812"></a>
-<span class="sourceLineNo">813</span>     */<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      this.map.clear();<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    }<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.822"></a>
-<span class="sourceLineNo">823</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.824"></a>
-<span class="sourceLineNo">825</span>  }<a name="line.825"></a>
-<span class="sourceLineNo">826</span><a name="line.826"></a>
-<span class="sourceLineNo">827</span>  /**<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * HFile.Writer.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   *<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * @return Array of strings, each represents a supported compression<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   *         supported.<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   *         &lt;ul&gt;<a name="line.834"></a>
-<span class="sourceLineNo">835</span>   *         &lt;li&gt;"none" - No compression.<a name="line.835"></a>
-<span class="sourceLineNo">836</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.836"></a>
-<span class="sourceLineNo">837</span>   *         &lt;/ul&gt;<a name="line.837"></a>
-<span class="sourceLineNo">838</span>   */<a name="line.838"></a>
-<span class="sourceLineNo">839</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    return Compression.getSupportedAlgorithms();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
-<span class="sourceLineNo">842</span><a name="line.842"></a>
-<span class="sourceLineNo">843</span>  // Utility methods.<a name="line.843"></a>
-<span class="sourceLineNo">844</span>  /*<a name="line.844"></a>
-<span class="sourceLineNo">845</span>   * @param l Long to convert to an int.<a name="line.845"></a>
-<span class="sourceLineNo">846</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.846"></a>
-<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
-<span class="sourceLineNo">848</span>  static int longToInt(final long l) {<a name="line.848"></a>
-<span class="sourceLineNo">849</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.851"></a>
-<span class="sourceLineNo">852</span>  }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>  /**<a name="line.854"></a>
-<span class="sourceLineNo">855</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.855"></a>
-<span class="sourceLineNo">856</span>   * empty list.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>   *<a name="line.857"></a>
-<span class="sourceLineNo">858</span>   * @param fs  The file system reference.<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * @param regionDir  The region directory to scan.<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * @return The list of files found.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * @throws IOException When scanning the files fails.<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   */<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      throws IOException {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;Path&gt;();<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    for(FileStatus dir : familyDirs) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.869"></a>
-<span class="sourceLineNo">870</span>      for (FileStatus file : files) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        if (!file.isDirectory() &amp;&amp;<a name="line.871"></a>
-<span class="sourceLineNo">872</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          regionHFiles.add(file.getPath());<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>      }<a name="line.876"></a>
-<span class="sourceLineNo">877</span>    }<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    return regionHFiles;<a name="line.878"></a>
-<span class="sourceLineNo">879</span>  }<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>  /**<a name="line.881"></a>
-<span class="sourceLineNo">882</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.882"></a>
-<span class="sourceLineNo">883</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.883"></a>
-<span class="sourceLineNo">884</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.884"></a>
-<span class="sourceLineNo">885</span>   * indicate that this is not a software error, but corrupted input.<a name="line.885"></a>
-<span class="sourceLineNo">886</span>   *<a name="line.886"></a>
-<span class="sourceLineNo">887</span>   * @param version an HFile version<a name="line.887"></a>
-<span class="sourceLineNo">888</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.888"></a>
-<span class="sourceLineNo">889</span>   */<a name="line.889"></a>
-<span class="sourceLineNo">890</span>  public static void checkFormatVersion(int version)<a name="line.890"></a>
-<span class="sourceLineNo">891</span>      throws IllegalArgumentException {<a name="line.891"></a>
-<span class="sourceLineNo">892</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.894"></a>
-<span class="sourceLineNo">895</span>          + MAX_FORMAT_VERSION + ")");<a name="line.895"></a>
-<span class="sourceLineNo">896</span>    }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>  }<a name="line.897"></a>
-<span class="sourceLineNo">898</span><a name="line.898"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>    public Writer create() throws IOException {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.303"></a>
+<span class="sourceLineNo">304</span>            "filesystem/path or path");<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      if (path != null) {<a name="line.306"></a>
+<span class="sourceLineNo">307</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        try {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.309"></a>
+<span class="sourceLineNo">310</span>        } catch (UnsupportedOperationException uoe) {<a name="line.310"></a>
+<span class="sourceLineNo">311</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        }<a name="line.313"></a>
+<span class="sourceLineNo">314</span>      }<a name="line.314"></a>
+<span class="sourceLineNo">315</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    }<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  /** The configuration key for HFile version to use for new files */<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  public static int getFormatVersion(Configuration conf) {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    checkFormatVersion(version);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return version;<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * Disables block cache access for all writers created through the<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * returned factory.<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   */<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.333"></a>
+<span class="sourceLineNo">334</span>       conf) {<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    Configuration tempConf = new Configuration(conf);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  }<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  /**<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.343"></a>
+<span class="sourceLineNo">344</span>      CacheConfig cacheConf) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    int version = getFormatVersion(conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    switch (version) {<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    case 2:<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.350"></a>
+<span class="sourceLineNo">351</span>        "in hbase-site.xml)");<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    case 3:<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    default:<a name="line.354"></a>
+<span class="sourceLineNo">355</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.355"></a>
+<span class="sourceLineNo">356</span>          "format version " + version);<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  /**<a name="line.360"></a>
+<span class="sourceLineNo">361</span>   * An abstraction used by the block index.<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public interface CachingBlockReader {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>    /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>     * Read in a file block.<a name="line.367"></a>
+<span class="sourceLineNo">368</span>     * @param offset offset to read.<a name="line.368"></a>
+<span class="sourceLineNo">369</span>     * @param onDiskBlockSize size of the block<a name="line.369"></a>
+<span class="sourceLineNo">370</span>     * @param cacheBlock<a name="line.370"></a>
+<span class="sourceLineNo">371</span>     * @param pread<a name="line.371"></a>
+<span class="sourceLineNo">372</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.372"></a>
+<span class="sourceLineNo">373</span>     * @param expectedBlockType the block type we are expecting to read

<TRUNCATED>

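The HFile.java excerpt above (cut off at the &lt;TRUNCATED&gt; marker) documents several static helpers: createReader(FileSystem, Path, CacheConfig, Configuration), isHFileFormat(FileSystem, Path), getFormatVersion(Configuration) and checkFormatVersion(int). The following minimal sketch shows one way those helpers could be combined to validate and open an HFile. It is illustrative only and is not part of the published-site diff: the wrapper class and the command-line path are hypothetical, and it assumes an HBase release whose signatures match the source shown above.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.HFile;

    public class HFileOpenSketch {                     // hypothetical wrapper class, for illustration only
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path(args[0]);                 // candidate HFile path supplied by the caller

        // checkFormatVersion() rejects versions outside [MIN_FORMAT_VERSION, MAX_FORMAT_VERSION].
        HFile.checkFormatVersion(HFile.getFormatVersion(conf));

        // isHFileFormat() only verifies that the file carries a valid fixed file trailer.
        if (!HFile.isHFileFormat(fs, path)) {
          System.out.println(path + " is not an HFile");
          return;
        }

        // createReader() requires a non-null CacheConfig (per its javadoc) and throws
        // CorruptHFileException, a DoNotRetryIOException subtype, if the file is corrupt.
        HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
        try {
          System.out.println("Opened " + reader.getName());
        } finally {
          reader.close();
        }
      }
    }

As the source above suggests, createReader() wraps the input in an FSDataInputStreamWrapper and selects the concrete reader from the file trailer via pickReaderVersion(), so callers never choose a format version themselves.
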
[25/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.Modify.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.Modify.html b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.Modify.html
index de0d003..ea9c5c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.Modify.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.Modify.html
@@ -73,1205 +73,1207 @@
 <span class="sourceLineNo">065</span>  public static final byte[] RPC_HEADER = new byte[] { 'H', 'B', 'a', 's' };<a name="line.65"></a>
 <span class="sourceLineNo">066</span>  public static final byte RPC_CURRENT_VERSION = 0;<a name="line.66"></a>
 <span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  // HFileBlock constants.<a name="line.68"></a>
-<span class="sourceLineNo">069</span><a name="line.69"></a>
-<span class="sourceLineNo">070</span>  /** The size data structures with minor version is 0 */<a name="line.70"></a>
-<span class="sourceLineNo">071</span>  public static final int HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM = MAGIC_LENGTH + 2 * Bytes.SIZEOF_INT<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      + Bytes.SIZEOF_LONG;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  /** The size of a version 2 HFile block header, minor version 1.<a name="line.73"></a>
-<span class="sourceLineNo">074</span>   * There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum<a name="line.74"></a>
-<span class="sourceLineNo">075</span>   * followed by another 4 byte value to store sizeofDataOnDisk.<a name="line.75"></a>
-<span class="sourceLineNo">076</span>   */<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  public static final int HFILEBLOCK_HEADER_SIZE = HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM +<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    Bytes.SIZEOF_BYTE + 2 * Bytes.SIZEOF_INT;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  /** Just an array of bytes of the right size. */<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  public static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HFILEBLOCK_HEADER_SIZE];<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  //End HFileBlockConstants.<a name="line.82"></a>
+<span class="sourceLineNo">068</span>  // HFileBlock constants. TODO!!!! THESE DEFINES BELONG IN HFILEBLOCK, NOT UP HERE.<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  // Needed down in hbase-common though by encoders but these encoders should not be dealing<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  // in the internals of hfileblocks. Fix encapsulation.<a name="line.70"></a>
+<span class="sourceLineNo">071</span><a name="line.71"></a>
+<span class="sourceLineNo">072</span>  /** The size data structures with minor version is 0 */<a name="line.72"></a>
+<span class="sourceLineNo">073</span>  public static final int HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM = MAGIC_LENGTH + 2 * Bytes.SIZEOF_INT<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      + Bytes.SIZEOF_LONG;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  /** The size of a version 2 HFile block header, minor version 1.<a name="line.75"></a>
+<span class="sourceLineNo">076</span>   * There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum<a name="line.76"></a>
+<span class="sourceLineNo">077</span>   * followed by another 4 byte value to store sizeofDataOnDisk.<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   */<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  public static final int HFILEBLOCK_HEADER_SIZE = HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM +<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    Bytes.SIZEOF_BYTE + 2 * Bytes.SIZEOF_INT;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  /** Just an array of bytes of the right size. */<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HFILEBLOCK_HEADER_SIZE];<a name="line.82"></a>
 <span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   * Status codes used for return values of bulk operations.<a name="line.85"></a>
-<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  @InterfaceAudience.Private<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  public enum OperationStatusCode {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    NOT_RUN,<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    SUCCESS,<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    BAD_FAMILY,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    SANITY_CHECK_FAILURE,<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    FAILURE;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  /** long constant for zero */<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  public static final Long ZERO_L = Long.valueOf(0L);<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static final String NINES = "99999999999999";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final String ZEROES = "00000000000000";<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  // For migration<a name="line.101"></a>
+<span class="sourceLineNo">084</span>  //End HFileBlockConstants.<a name="line.84"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>  /**<a name="line.86"></a>
+<span class="sourceLineNo">087</span>   * Status codes used for return values of bulk operations.<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   */<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  @InterfaceAudience.Private<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  public enum OperationStatusCode {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    NOT_RUN,<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    SUCCESS,<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    BAD_FAMILY,<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    SANITY_CHECK_FAILURE,<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    FAILURE;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  }<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  /** long constant for zero */<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  public static final Long ZERO_L = Long.valueOf(0L);<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  public static final String NINES = "99999999999999";<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  public static final String ZEROES = "00000000000000";<a name="line.101"></a>
 <span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  /** name of version file */<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  public static final String VERSION_FILE_NAME = "hbase.version";<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  /**<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * Current version of file system.<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * Version 4 supports only one kind of bloom filter.<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   * Version 5 changes versions in catalog table regions.<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * Version 6 enables blockcaching on catalog tables.<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * Version 7 introduces hfile -- hbase 0.19 to 0.20..<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   * Version 8 introduces namespace<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   */<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  // public static final String FILE_SYSTEM_VERSION = "6";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  public static final String FILE_SYSTEM_VERSION = "8";<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  // Configuration parameters<a name="line.117"></a>
+<span class="sourceLineNo">103</span>  // For migration<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>  /** name of version file */<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  public static final String VERSION_FILE_NAME = "hbase.version";<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>  /**<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * Current version of file system.<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * Version 4 supports only one kind of bloom filter.<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   * Version 5 changes versions in catalog table regions.<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Version 6 enables blockcaching on catalog tables.<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * Version 7 introduces hfile -- hbase 0.19 to 0.20..<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * Version 8 introduces namespace<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   */<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  // public static final String FILE_SYSTEM_VERSION = "6";<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  public static final String FILE_SYSTEM_VERSION = "8";<a name="line.117"></a>
 <span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  //TODO: Is having HBase homed on port 60k OK?<a name="line.119"></a>
+<span class="sourceLineNo">119</span>  // Configuration parameters<a name="line.119"></a>
 <span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  /** Cluster is in distributed mode or not */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  public static final String CLUSTER_DISTRIBUTED = "hbase.cluster.distributed";<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  /** Config for pluggable load balancers */<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  public static final String HBASE_MASTER_LOADBALANCER_CLASS = "hbase.master.loadbalancer.class";<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  /** Config for balancing the cluster by table */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  public static final String HBASE_MASTER_LOADBALANCE_BYTABLE = "hbase.master.loadbalance.bytable";<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /** The name of the ensemble table */<a name="line.130"></a>
-<span class="sourceLineNo">131</span>  public static final String ENSEMBLE_TABLE_NAME = "hbase:ensemble";<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>  /** Config for pluggable region normalizer */<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  public static final String HBASE_MASTER_NORMALIZER_CLASS =<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    "hbase.master.normalizer.class";<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  /** Cluster is standalone or pseudo-distributed */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static final boolean CLUSTER_IS_LOCAL = false;<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>  /** Cluster is fully-distributed */<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  public static final boolean CLUSTER_IS_DISTRIBUTED = true;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>  /** Default value for cluster distributed mode */<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  public static final boolean DEFAULT_CLUSTER_DISTRIBUTED = CLUSTER_IS_LOCAL;<a name="line.144"></a>
-<span class="sourceLineNo">145</span><a name="line.145"></a>
-<span class="sourceLineNo">146</span>  /** default host address */<a name="line.146"></a>
-<span class="sourceLineNo">147</span>  public static final String DEFAULT_HOST = "0.0.0.0";<a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  /** Parameter name for port master listens on. */<a name="line.149"></a>
-<span class="sourceLineNo">150</span>  public static final String MASTER_PORT = "hbase.master.port";<a name="line.150"></a>
-<span class="sourceLineNo">151</span><a name="line.151"></a>
-<span class="sourceLineNo">152</span>  /** default port that the master listens on */<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  public static final int DEFAULT_MASTER_PORT = 16000;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  /** default port for master web api */<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  public static final int DEFAULT_MASTER_INFOPORT = 16010;<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>  /** Configuration key for master web API port */<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  public static final String MASTER_INFO_PORT = "hbase.master.info.port";<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>  /** Parameter name for the master type being backup (waits for primary to go inactive). */<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>  /**<a name="line.164"></a>
-<span class="sourceLineNo">165</span>   * by default every master is a possible primary master unless the conf explicitly overrides it<a name="line.165"></a>
-<span class="sourceLineNo">166</span>   */<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  /** Name of ZooKeeper quorum configuration parameter. */<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  public static final String ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  /** Common prefix of ZooKeeper configuration properties */<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  public static final String ZK_CFG_PROPERTY_PREFIX =<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      "hbase.zookeeper.property.";<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  public static final int ZK_CFG_PROPERTY_PREFIX_LEN =<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      ZK_CFG_PROPERTY_PREFIX.length();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  /**<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   * The ZK client port key in the ZK properties map. The name reflects the<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * fact that this is not an HBase configuration key.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   */<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  public static final String CLIENT_PORT_STR = "clientPort";<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /** Parameter name for the client port that the zookeeper listens on */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  public static final String ZOOKEEPER_CLIENT_PORT =<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      ZK_CFG_PROPERTY_PREFIX + CLIENT_PORT_STR;<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  /** Default client port that the zookeeper listens on */<a name="line.189"></a>
-<span class="sourceLineNo">190</span>  public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;<a name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>  /**<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * Parameter name for the wait time for the recoverable zookeeper<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      "hbase.zookeeper.recoverable.waittime";<a name="line.196"></a>
-<span class="sourceLineNo">197</span><a name="line.197"></a>
-<span class="sourceLineNo">198</span>  /** Default wait time for the recoverable zookeeper */<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  /** Parameter name for the root dir in ZK for this cluster */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  public static final String ZOOKEEPER_ZNODE_PARENT = "zookeeper.znode.parent";<a name="line.202"></a>
-<span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>  public static final String DEFAULT_ZOOKEEPER_ZNODE_PARENT = "/hbase";<a name="line.204"></a>
+<span class="sourceLineNo">121</span>  //TODO: Is having HBase homed on port 60k OK?<a name="line.121"></a>
+<span class="sourceLineNo">122</span><a name="line.122"></a>
+<span class="sourceLineNo">123</span>  /** Cluster is in distributed mode or not */<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  public static final String CLUSTER_DISTRIBUTED = "hbase.cluster.distributed";<a name="line.124"></a>
+<span class="sourceLineNo">125</span><a name="line.125"></a>
+<span class="sourceLineNo">126</span>  /** Config for pluggable load balancers */<a name="line.126"></a>
+<span class="sourceLineNo">127</span>  public static final String HBASE_MASTER_LOADBALANCER_CLASS = "hbase.master.loadbalancer.class";<a name="line.127"></a>
+<span class="sourceLineNo">128</span><a name="line.128"></a>
+<span class="sourceLineNo">129</span>  /** Config for balancing the cluster by table */<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  public static final String HBASE_MASTER_LOADBALANCE_BYTABLE = "hbase.master.loadbalance.bytable";<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /** The name of the ensemble table */<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  public static final String ENSEMBLE_TABLE_NAME = "hbase:ensemble";<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  /** Config for pluggable region normalizer */<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  public static final String HBASE_MASTER_NORMALIZER_CLASS =<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    "hbase.master.normalizer.class";<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  /** Cluster is standalone or pseudo-distributed */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static final boolean CLUSTER_IS_LOCAL = false;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>  /** Cluster is fully-distributed */<a name="line.142"></a>
+<span class="sourceLineNo">143</span>  public static final boolean CLUSTER_IS_DISTRIBUTED = true;<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>  /** Default value for cluster distributed mode */<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  public static final boolean DEFAULT_CLUSTER_DISTRIBUTED = CLUSTER_IS_LOCAL;<a name="line.146"></a>
+<span class="sourceLineNo">147</span><a name="line.147"></a>
+<span class="sourceLineNo">148</span>  /** default host address */<a name="line.148"></a>
+<span class="sourceLineNo">149</span>  public static final String DEFAULT_HOST = "0.0.0.0";<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>  /** Parameter name for port master listens on. */<a name="line.151"></a>
+<span class="sourceLineNo">152</span>  public static final String MASTER_PORT = "hbase.master.port";<a name="line.152"></a>
+<span class="sourceLineNo">153</span><a name="line.153"></a>
+<span class="sourceLineNo">154</span>  /** default port that the master listens on */<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  public static final int DEFAULT_MASTER_PORT = 16000;<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  /** default port for master web api */<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  public static final int DEFAULT_MASTER_INFOPORT = 16010;<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>  /** Configuration key for master web API port */<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  public static final String MASTER_INFO_PORT = "hbase.master.info.port";<a name="line.161"></a>
+<span class="sourceLineNo">162</span><a name="line.162"></a>
+<span class="sourceLineNo">163</span>  /** Parameter name for the master type being backup (waits for primary to go inactive). */<a name="line.163"></a>
+<span class="sourceLineNo">164</span>  public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span>  /**<a name="line.166"></a>
+<span class="sourceLineNo">167</span>   * by default every master is a possible primary master unless the conf explicitly overrides it<a name="line.167"></a>
+<span class="sourceLineNo">168</span>   */<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;<a name="line.169"></a>
+<span class="sourceLineNo">170</span><a name="line.170"></a>
+<span class="sourceLineNo">171</span>  /** Name of ZooKeeper quorum configuration parameter. */<a name="line.171"></a>
+<span class="sourceLineNo">172</span>  public static final String ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";<a name="line.172"></a>
+<span class="sourceLineNo">173</span><a name="line.173"></a>
+<span class="sourceLineNo">174</span>  /** Common prefix of ZooKeeper configuration properties */<a name="line.174"></a>
+<span class="sourceLineNo">175</span>  public static final String ZK_CFG_PROPERTY_PREFIX =<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      "hbase.zookeeper.property.";<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  public static final int ZK_CFG_PROPERTY_PREFIX_LEN =<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      ZK_CFG_PROPERTY_PREFIX.length();<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  /**<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   * The ZK client port key in the ZK properties map. The name reflects the<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   * fact that this is not an HBase configuration key.<a name="line.183"></a>
+<span class="sourceLineNo">184</span>   */<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  public static final String CLIENT_PORT_STR = "clientPort";<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>  /** Parameter name for the client port that the zookeeper listens on */<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  public static final String ZOOKEEPER_CLIENT_PORT =<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      ZK_CFG_PROPERTY_PREFIX + CLIENT_PORT_STR;<a name="line.189"></a>
+<span class="sourceLineNo">190</span><a name="line.190"></a>
+<span class="sourceLineNo">191</span>  /** Default client port that the zookeeper listens on */<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;<a name="line.192"></a>
+<span class="sourceLineNo">193</span><a name="line.193"></a>
+<span class="sourceLineNo">194</span>  /**<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * Parameter name for the wait time for the recoverable zookeeper<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   */<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      "hbase.zookeeper.recoverable.waittime";<a name="line.198"></a>
+<span class="sourceLineNo">199</span><a name="line.199"></a>
+<span class="sourceLineNo">200</span>  /** Default wait time for the recoverable zookeeper */<a name="line.200"></a>
+<span class="sourceLineNo">201</span>  public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>  /** Parameter name for the root dir in ZK for this cluster */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  public static final String ZOOKEEPER_ZNODE_PARENT = "zookeeper.znode.parent";<a name="line.204"></a>
 <span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Parameter name for the limit on concurrent client-side zookeeper<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * connections<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  public static final String ZOOKEEPER_MAX_CLIENT_CNXNS =<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      ZK_CFG_PROPERTY_PREFIX + "maxClientCnxns";<a name="line.211"></a>
-<span class="sourceLineNo">212</span><a name="line.212"></a>
-<span class="sourceLineNo">213</span>  /** Parameter name for the ZK data directory */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  public static final String ZOOKEEPER_DATA_DIR =<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      ZK_CFG_PROPERTY_PREFIX + "dataDir";<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  /** Parameter name for the ZK tick time */<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final String ZOOKEEPER_TICK_TIME =<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      ZK_CFG_PROPERTY_PREFIX + "tickTime";<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>  /** Default limit on concurrent client-side zookeeper connections */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  public static final int DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS = 300;<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /** Configuration key for ZooKeeper session timeout */<a name="line.224"></a>
-<span class="sourceLineNo">225</span>  public static final String ZK_SESSION_TIMEOUT = "zookeeper.session.timeout";<a name="line.225"></a>
-<span class="sourceLineNo">226</span><a name="line.226"></a>
-<span class="sourceLineNo">227</span>  /** Default value for ZooKeeper session timeout */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  public static final int DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  /** Configuration key for whether to use ZK.multi */<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  public static final String ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Parameter name for port region server listens on. */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  public static final String REGIONSERVER_PORT = "hbase.regionserver.port";<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Default port region server listens on. */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  public static final int DEFAULT_REGIONSERVER_PORT = 16020;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** default port for region server web api */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  public static final int DEFAULT_REGIONSERVER_INFOPORT = 16030;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** A configuration key for regionserver info port */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  public static final String REGIONSERVER_INFO_PORT =<a name="line.243"></a>
-<span class="sourceLineNo">244</span>    "hbase.regionserver.info.port";<a name="line.244"></a>
-<span class="sourceLineNo">245</span><a name="line.245"></a>
-<span class="sourceLineNo">246</span>  /** A flag that enables automatic selection of regionserver info port */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static final String REGIONSERVER_INFO_PORT_AUTO =<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      REGIONSERVER_INFO_PORT + ".auto";<a name="line.248"></a>
-<span class="sourceLineNo">249</span><a name="line.249"></a>
-<span class="sourceLineNo">250</span>  /** Parameter name for what region server implementation to use. */<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public static final String REGION_SERVER_IMPL= "hbase.regionserver.impl";<a name="line.251"></a>
-<span class="sourceLineNo">252</span><a name="line.252"></a>
-<span class="sourceLineNo">253</span>  /** Parameter name for what master implementation to use. */<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  public static final String MASTER_IMPL= "hbase.master.impl";<a name="line.254"></a>
-<span class="sourceLineNo">255</span><a name="line.255"></a>
-<span class="sourceLineNo">256</span>  /** Parameter name for what hbase client implementation to use. */<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  public static final String HBASECLIENT_IMPL= "hbase.hbaseclient.impl";<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>  /** Parameter name for how often threads should wake up */<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  public static final String THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  /** Default value for thread wake frequency */<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  public static final int DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;<a name="line.263"></a>
-<span class="sourceLineNo">264</span><a name="line.264"></a>
-<span class="sourceLineNo">265</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>  public static final String VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.268"></a>
-<span class="sourceLineNo">269</span>  public static final int DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>  /** Parameter name for how often a region should should perform a major compaction */<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  public static final String MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>  /** Parameter name for the maximum batch of KVs to be used in flushes and compactions */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  public static final String COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max";<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  public static final int COMPACTION_KV_MAX_DEFAULT = 10;<a name="line.276"></a>
-<span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span>  /** Parameter name for HBase instance root directory */<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  public static final String HBASE_DIR = "hbase.rootdir";<a name="line.279"></a>
-<span class="sourceLineNo">280</span><a name="line.280"></a>
-<span class="sourceLineNo">281</span>  /** Parameter name for HBase client IPC pool type */<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  public static final String HBASE_CLIENT_IPC_POOL_TYPE = "hbase.client.ipc.pool.type";<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /** Parameter name for HBase client IPC pool size */<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  public static final String HBASE_CLIENT_IPC_POOL_SIZE = "hbase.client.ipc.pool.size";<a name="line.285"></a>
-<span class="sourceLineNo">286</span><a name="line.286"></a>
-<span class="sourceLineNo">287</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  public static final String HBASE_CLIENT_OPERATION_TIMEOUT = "hbase.client.operation.timeout";<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  public static final String HBASE_CLIENT_META_OPERATION_TIMEOUT =<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    "hbase.client.meta.operation.timeout";<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  /** Default HBase client operation timeout, which is tantamount to a blocking call */<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  public static final int DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT = 1200000;<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /** Used to construct the name of the log directory for a region server */<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  public static final String HREGION_LOGDIR_NAME = "WALs";<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>  /** Used to construct the name of the splitlog directory for a region server */<a name="line.300"></a>
-<span class="sourceLineNo">301</span>  public static final String SPLIT_LOGDIR_NAME = "splitWAL";<a name="line.301"></a>
-<span class="sourceLineNo">302</span><a name="line.302"></a>
-<span class="sourceLineNo">303</span>  /** Like the previous, but for old logs that are about to be deleted */<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  public static final String HREGION_OLDLOGDIR_NAME = "oldWALs";<a name="line.304"></a>
-<span class="sourceLineNo">305</span><a name="line.305"></a>
-<span class="sourceLineNo">306</span>  public static final String CORRUPT_DIR_NAME = "corrupt";<a name="line.306"></a>
+<span class="sourceLineNo">206</span>  public static final String DEFAULT_ZOOKEEPER_ZNODE_PARENT = "/hbase";<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>  /**<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   * Parameter name for the limit on concurrent client-side zookeeper<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   * connections<a name="line.210"></a>
+<span class="sourceLineNo">211</span>   */<a name="line.211"></a>
+<span class="sourceLineNo">212</span>  public static final String ZOOKEEPER_MAX_CLIENT_CNXNS =<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      ZK_CFG_PROPERTY_PREFIX + "maxClientCnxns";<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>  /** Parameter name for the ZK data directory */<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final String ZOOKEEPER_DATA_DIR =<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      ZK_CFG_PROPERTY_PREFIX + "dataDir";<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>  /** Parameter name for the ZK tick time */<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  public static final String ZOOKEEPER_TICK_TIME =<a name="line.220"></a>
+<span class="sourceLineNo">221</span>      ZK_CFG_PROPERTY_PREFIX + "tickTime";<a name="line.221"></a>
+<span class="sourceLineNo">222</span><a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /** Default limit on concurrent client-side zookeeper connections */<a name="line.223"></a>
+<span class="sourceLineNo">224</span>  public static final int DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS = 300;<a name="line.224"></a>
+<span class="sourceLineNo">225</span><a name="line.225"></a>
+<span class="sourceLineNo">226</span>  /** Configuration key for ZooKeeper session timeout */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  public static final String ZK_SESSION_TIMEOUT = "zookeeper.session.timeout";<a name="line.227"></a>
+<span class="sourceLineNo">228</span><a name="line.228"></a>
+<span class="sourceLineNo">229</span>  /** Default value for ZooKeeper session timeout */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  public static final int DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;<a name="line.230"></a>
+<span class="sourceLineNo">231</span><a name="line.231"></a>
+<span class="sourceLineNo">232</span>  /** Configuration key for whether to use ZK.multi */<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  public static final String ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";<a name="line.233"></a>
+<span class="sourceLineNo">234</span><a name="line.234"></a>
+<span class="sourceLineNo">235</span>  /** Parameter name for port region server listens on. */<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  public static final String REGIONSERVER_PORT = "hbase.regionserver.port";<a name="line.236"></a>
+<span class="sourceLineNo">237</span><a name="line.237"></a>
+<span class="sourceLineNo">238</span>  /** Default port region server listens on. */<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  public static final int DEFAULT_REGIONSERVER_PORT = 16020;<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /** default port for region server web api */<a name="line.241"></a>
+<span class="sourceLineNo">242</span>  public static final int DEFAULT_REGIONSERVER_INFOPORT = 16030;<a name="line.242"></a>
+<span class="sourceLineNo">243</span><a name="line.243"></a>
+<span class="sourceLineNo">244</span>  /** A configuration key for regionserver info port */<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  public static final String REGIONSERVER_INFO_PORT =<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    "hbase.regionserver.info.port";<a name="line.246"></a>
+<span class="sourceLineNo">247</span><a name="line.247"></a>
+<span class="sourceLineNo">248</span>  /** A flag that enables automatic selection of regionserver info port */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public static final String REGIONSERVER_INFO_PORT_AUTO =<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      REGIONSERVER_INFO_PORT + ".auto";<a name="line.250"></a>
+<span class="sourceLineNo">251</span><a name="line.251"></a>
+<span class="sourceLineNo">252</span>  /** Parameter name for what region server implementation to use. */<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  public static final String REGION_SERVER_IMPL= "hbase.regionserver.impl";<a name="line.253"></a>
+<span class="sourceLineNo">254</span><a name="line.254"></a>
+<span class="sourceLineNo">255</span>  /** Parameter name for what master implementation to use. */<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public static final String MASTER_IMPL= "hbase.master.impl";<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  /** Parameter name for what hbase client implementation to use. */<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  public static final String HBASECLIENT_IMPL= "hbase.hbaseclient.impl";<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  /** Parameter name for how often threads should wake up */<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  public static final String THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>  /** Default value for thread wake frequency */<a name="line.264"></a>
+<span class="sourceLineNo">265</span>  public static final int DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.267"></a>
+<span class="sourceLineNo">268</span>  public static final String VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  public static final int DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>  /** Parameter name for how often a region should should perform a major compaction */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  public static final String MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /** Parameter name for the maximum batch of KVs to be used in flushes and compactions */<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  public static final String COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max";<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  public static final int COMPACTION_KV_MAX_DEFAULT = 10;<a name="line.278"></a>
+<span class="sourceLineNo">279</span><a name="line.279"></a>
+<span class="sourceLineNo">280</span>  /** Parameter name for HBase instance root directory */<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  public static final String HBASE_DIR = "hbase.rootdir";<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>  /** Parameter name for HBase client IPC pool type */<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  public static final String HBASE_CLIENT_IPC_POOL_TYPE = "hbase.client.ipc.pool.type";<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  /** Parameter name for HBase client IPC pool size */<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  public static final String HBASE_CLIENT_IPC_POOL_SIZE = "hbase.client.ipc.pool.size";<a name="line.287"></a>
+<span class="sourceLineNo">288</span><a name="line.288"></a>
+<span class="sourceLineNo">289</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public static final String HBASE_CLIENT_OPERATION_TIMEOUT = "hbase.client.operation.timeout";<a name="line.290"></a>
+<span class="sourceLineNo">291</span><a name="line.291"></a>
+<span class="sourceLineNo">292</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  public static final String HBASE_CLIENT_META_OPERATION_TIMEOUT =<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    "hbase.client.meta.operation.timeout";<a name="line.294"></a>
+<span class="sourceLineNo">295</span><a name="line.295"></a>
+<span class="sourceLineNo">296</span>  /** Default HBase client operation timeout, which is tantamount to a blocking call */<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  public static final int DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT = 1200000;<a name="line.297"></a>
+<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">299</span>  /** Used to construct the name of the log directory for a region server */<a name="line.299"></a>
+<span class="sourceLineNo">300</span>  public static final String HREGION_LOGDIR_NAME = "WALs";<a name="line.300"></a>
+<span class="sourceLineNo">301</span><a name="line.301"></a>
+<span class="sourceLineNo">302</span>  /** Used to construct the name of the splitlog directory for a region server */<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  public static final String SPLIT_LOGDIR_NAME = "splitWAL";<a name="line.303"></a>
+<span class="sourceLineNo">304</span><a name="line.304"></a>
+<span class="sourceLineNo">305</span>  /** Like the previous, but for old logs that are about to be deleted */<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  public static final String HREGION_OLDLOGDIR_NAME = "oldWALs";<a name="line.306"></a>
 <span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  /** Used by HBCK to sideline backup data */<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  public static final String HBCK_SIDELINEDIR_NAME = ".hbck";<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>  /** Any artifacts left from migration can be moved here */<a name="line.311"></a>
-<span class="sourceLineNo">312</span>  public static final String MIGRATION_NAME = ".migration";<a name="line.312"></a>
-<span class="sourceLineNo">313</span><a name="line.313"></a>
-<span class="sourceLineNo">314</span>  /**<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * The directory from which co-processor/custom filter jars can be loaded<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * dynamically by the region servers. This value can be overridden by the<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   * hbase.dynamic.jars.dir config.<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  public static final String LIB_DIR = "lib";<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  /** Used to construct the name of the compaction directory during compaction */<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  public static final String HREGION_COMPACTIONDIR_NAME = "compaction.dir";<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /** Conf key for the max file size after which we split the region */<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  public static final String HREGION_MAX_FILESIZE =<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      "hbase.hregion.max.filesize";<a name="line.326"></a>
-<span class="sourceLineNo">327</span><a name="line.327"></a>
-<span class="sourceLineNo">328</span>  /** Default maximum file size */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  public static final long DEFAULT_MAX_FILE_SIZE = 10 * 1024 * 1024 * 1024L;<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  /**<a name="line.331"></a>
-<span class="sourceLineNo">332</span>   * Max size of single row for Get's or Scan's without in-row scanning flag set.<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   */<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  public static final String TABLE_MAX_ROWSIZE_KEY = "hbase.table.max.rowsize";<a name="line.334"></a>
-<span class="sourceLineNo">335</span><a name="line.335"></a>
-<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * Default max row size (1 Gb).<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   */<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  public static final long TABLE_MAX_ROWSIZE_DEFAULT = 1024 * 1024 * 1024L;<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * The max number of threads used for opening and closing stores or store<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * files in parallel<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  public static final String HSTORE_OPEN_AND_CLOSE_THREADS_MAX =<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    "hbase.hstore.open.and.close.threads.max";<a name="line.346"></a>
-<span class="sourceLineNo">347</span><a name="line.347"></a>
-<span class="sourceLineNo">348</span>  /**<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   * The default number for the max number of threads used for opening and<a name="line.349"></a>
-<span class="sourceLineNo">350</span>   * closing stores or store files in parallel<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   */<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  public static final int DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX = 1;<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>  /**<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * Block updates if memstore has hbase.hregion.memstore.block.multiplier<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   * times hbase.hregion.memstore.flush.size bytes.  Useful preventing<a name="line.356"></a>
-<span class="sourceLineNo">357</span>   * runaway memstore during spikes in update traffic.<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   */<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  public static final String HREGION_MEMSTORE_BLOCK_MULTIPLIER =<a name="line.359"></a>
-<span class="sourceLineNo">360</span>          "hbase.hregion.memstore.block.multiplier";<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * Default value for hbase.hregion.memstore.block.multiplier<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  public static final int DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER = 4;<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  /** Conf key for the memstore size at which we flush the memstore */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public static final String HREGION_MEMSTORE_FLUSH_SIZE =<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      "hbase.hregion.memstore.flush.size";<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>  public static final String HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      "hbase.hregion.edits.replay.skip.errors";<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  public static final boolean DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      false;<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  /** Maximum value length, enforced on KeyValue construction */<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public static final int MAXIMUM_VALUE_LENGTH = Integer.MAX_VALUE - 1;<a name="line.378"></a>
-<span class="sourceLineNo">379</span><a name="line.379"></a>
-<span class="sourceLineNo">380</span>  /** name of the file for unique cluster ID */<a name="line.380"></a>
-<span class="sourceLineNo">381</span>  public static final String CLUSTER_ID_FILE_NAME = "hbase.id";<a name="line.381"></a>
-<span class="sourceLineNo">382</span><a name="line.382"></a>
-<span class="sourceLineNo">383</span>  /** Default value for cluster ID */<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  public static final String CLUSTER_ID_DEFAULT = "default-cluster";<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /** Parameter name for # days to keep MVCC values during a major compaction */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  public static final String KEEP_SEQID_PERIOD = "hbase.hstore.compaction.keep.seqId.period";<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  /** At least to keep MVCC values in hfiles for 5 days */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static final int MIN_KEEP_SEQID_PERIOD = 5;<a name="line.389"></a>
-<span class="sourceLineNo">390</span><a name="line.390"></a>
-<span class="sourceLineNo">391</span>  // Always store the location of the root table's HRegion.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  // This HRegion is never split.<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>  // region name = table + startkey + regionid. This is the row key.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>  // each row in the root and meta tables describes exactly 1 region<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  // Do we ever need to know all the information that we are storing?<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  // Note that the name of the root table starts with "-" and the name of the<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  // meta table starts with "." Why? it's a trick. It turns out that when we<a name="line.399"></a>
-<span class="sourceLineNo">400</span>  // store region names in memory, we use a SortedMap. Since "-" sorts before<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  // "." (and since no other table name can start with either of these<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  // characters, the root region will always be the first entry in such a Map,<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  // followed by all the meta regions (which will be ordered by their starting<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  // row key as well), followed by all user tables. So when the Master is<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  // choosing regions to assign, it will always choose the root region first,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  // followed by the meta regions, followed by user regions. Since the root<a name="line.406"></a>
-<span class="sourceLineNo">407</span>  // and meta regions always need to be on-line, this ensures that they will<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  // be the first to be reassigned if the server(s) they are being served by<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  // should go down.<a name="line.409"></a>
-<span class="sourceLineNo">410</span><a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>  /**<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * The hbase:meta table's name.<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @deprecated For upgrades of 0.94 to 0.96<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  @Deprecated  // for compat from 0.94 -&gt; 0.96.<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  public static final String BASE_NAMESPACE_DIR = "data";<a name="line.419"></a>
+<span class="sourceLineNo">308</span>  public static final String CORRUPT_DIR_NAME = "corrupt";<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  /** Used by HBCK to sideline backup data */<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  public static final String HBCK_SIDELINEDIR_NAME = ".hbck";<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /** Any artifacts left from migration can be moved here */<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  public static final String MIGRATION_NAME = ".migration";<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>  /**<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   * The directory from which co-processor/custom filter jars can be loaded<a name="line.317"></a>
+<span class="sourceLineNo">318</span>   * dynamically by the region servers. This value can be overridden by the<a name="line.318"></a>
+<span class="sourceLineNo">319</span>   * hbase.dynamic.jars.dir config.<a name="line.319"></a>
+<span class="sourceLineNo">320</span>   */<a name="line.320"></a>
+<span class="sourceLineNo">321</span>  public static final String LIB_DIR = "lib";<a name="line.321"></a>
+<span class="sourceLineNo">322</span><a name="line.322"></a>
+<span class="sourceLineNo">323</span>  /** Used to construct the name of the compaction directory during compaction */<a name="line.323"></a>
+<span class="sourceLineNo">324</span>  public static final String HREGION_COMPACTIONDIR_NAME = "compaction.dir";<a name="line.324"></a>
+<span class="sourceLineNo">325</span><a name="line.325"></a>
+<span class="sourceLineNo">326</span>  /** Conf key for the max file size after which we split the region */<a name="line.326"></a>
+<span class="sourceLineNo">327</span>  public static final String HREGION_MAX_FILESIZE =<a name="line.327"></a>
+<span class="sourceLineNo">328</span>      "hbase.hregion.max.filesize";<a name="line.328"></a>
+<span class="sourceLineNo">329</span><a name="line.329"></a>
+<span class="sourceLineNo">330</span>  /** Default maximum file size */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  public static final long DEFAULT_MAX_FILE_SIZE = 10 * 1024 * 1024 * 1024L;<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * Max size of single row for Get's or Scan's without in-row scanning flag set.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  public static final String TABLE_MAX_ROWSIZE_KEY = "hbase.table.max.rowsize";<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  /**<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * Default max row size (1 Gb).<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   */<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  public static final long TABLE_MAX_ROWSIZE_DEFAULT = 1024 * 1024 * 1024L;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  /**<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * The max number of threads used for opening and closing stores or store<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * files in parallel<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  public static final String HSTORE_OPEN_AND_CLOSE_THREADS_MAX =<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    "hbase.hstore.open.and.close.threads.max";<a name="line.348"></a>
+<span class="sourceLineNo">349</span><a name="line.349"></a>
+<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   * The default number for the max number of threads used for opening and<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * closing stores or store files in parallel<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   */<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  public static final int DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX = 1;<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>  /**<a name="line.356"></a>
+<span class="sourceLineNo">357</span>   * Block updates if memstore has hbase.hregion.memstore.block.multiplier<a name="line.357"></a>
+<span class="sourceLineNo">358</span>   * times hbase.hregion.memstore.flush.size bytes.  Useful preventing<a name="line.358"></a>
+<span class="sourceLineNo">359</span>   * runaway memstore during spikes in update traffic.<a name="line.359"></a>
+<span class="sourceLineNo">360</span>   */<a name="line.360"></a>
+<span class="sourceLineNo">361</span>  public static final String HREGION_MEMSTORE_BLOCK_MULTIPLIER =<a name="line.361"></a>
+<span class="sourceLineNo">362</span>          "hbase.hregion.memstore.block.multiplier";<a name="line.362"></a>
+<span class="sourceLineNo">363</span><a name="line.363"></a>
+<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * Default value for hbase.hregion.memstore.block.multiplier<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
+<span class="sourceLineNo">367</span>  public static final int DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER = 4;<a name="line.367"></a>
+<span class="sourceLineNo">368</span><a name="line.368"></a>
+<span class="sourceLineNo">369</span>  /** Conf key for the memstore size at which we flush the memstore */<a name="line.369"></a>
+<span class="sourceLineNo">370</span>  public static final String HREGION_MEMSTORE_FLUSH_SIZE =<a name="line.370"></a>
+<span class="sourceLineNo">371</span>      "hbase.hregion.memstore.flush.size";<a name="line.371"></a>
+<span class="sourceLineNo">372</span><a name="line.372"></a>
+<span class="sourceLineNo">373</span>  public static final String HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      "hbase.hregion.edits.replay.skip.errors";<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  public static final boolean DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      false;<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>  /** Maximum value length, enforced on KeyValue construction */<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  public static final int MAXIMUM_VALUE_LENGTH = Integer.MAX_VALUE - 1;<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /** name of the file for unique cluster ID */<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  public static final String CLUSTER_ID_FILE_NAME = "hbase.id";<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /** Default value for cluster ID */<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  public static final String CLUSTER_ID_DEFAULT = "default-cluster";<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /** Parameter name for # days to keep MVCC values during a major compaction */<a name="line.388"></a>
+<span class="sourceLineNo">389</span>  public static final String KEEP_SEQID_PERIOD = "hbase.hstore.compaction.keep.seqId.period";<a name="line.389"></a>
+<span class="sourceLineNo">390</span>  /** At least to keep MVCC values in hfiles for 5 days */<a name="line.390"></a>
+<span class="sourceLineNo">391</span>  public static final int MIN_KEEP_SEQID_PERIOD = 5;<a name="line.391"></a>
+<span class="sourceLineNo">392</span><a name="line.392"></a>
+<span class="sourceLineNo">393</span>  // Always store the location of the root table's HRegion.<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  // This HRegion is never split.<a name="line.394"></a>
+<span class="sourceLineNo">395</span><a name="line.395"></a>
+<span class="sourceLineNo">396</span>  // region name = table + startkey + regionid. This is the row key.<a name="line.396"></a>
+<span class="sourceLineNo">397</span>  // each row in the root and meta tables describes exactly 1 region<a name="line.397"></a>
+<span class="sourceLineNo">398</span>  // Do we ever need to know all the information that we are storing?<a name="line.398"></a>
+<span class="sourceLineNo">399</span><a name="line.399"></a>
+<span class="sourceLineNo">400</span>  // Note that the name of the root table starts with "-" and the name of the<a name="line.400"></a>
+<span class="sourceLineNo">401</span>  // meta table starts with "." Why? it's a trick. It turns out that when we<a name="line.401"></a>
+<span class="sourceLineNo">402</span>  // store region names in memory, we use a SortedMap. Since "-" sorts before<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  // "." (and since no other table name can start with either of these<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  // characters, the root region will always be the first entry in such a Map,<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  // followed by all the meta regions (which will be ordered by their starting<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  // row key as well), followed by all user tables. So when the Master is<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  // choosing regions to assign, it will always choose the root region first,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  // followed by the meta regions, followed by user regions. Since the root<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  // and meta regions always need to be on-line, this ensures that they will<a name="line.409"></a>
+<span class="sourceLineNo">410</span>  // be the first to be reassigned if the server(s) they are being served by<a name="line.410"></a>
+<span class="sourceLineNo">411</span>  // should go down.<a name="line.411"></a>
+<span class="sourceLineNo">412</span><a name="line.412"></a>
+<span class="sourceLineNo">413</span><a name="line.413"></a>
+<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * The hbase:meta table's name.<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   * @deprecated For upgrades of 0.94 to 0.96<a name="line.416"></a>
+<span class="sourceLineNo">417</span>   */<a name="line.417"></a>
+<span class="sourceLineNo">418</span>  @Deprecated  // for compat from 0.94 -&gt; 0.96.<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();<a name="line.419"></a>
 <span class="sourceLineNo">420</span><a name="line.420"></a>
-<span class="sourceLineNo">421</span>  /** delimiter used between portions of a region name */<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  public static final int META_ROW_DELIMITER = ',';<a name="line.422"></a>
-<span class="sourceLineNo">423</span><a name="line.423"></a>
-<span class="sourceLineNo">424</span>  /** The catalog family as a string*/<a name="line.424"></a>
-<span class="sourceLineNo">425</span>  public static final String CATALOG_FAMILY_STR = "info";<a name="line.425"></a>
-<span class="sourceLineNo">426</span><a name="line.426"></a>
-<span class="sourceLineNo">427</span>  /** The catalog family */<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  public static final byte [] CATALOG_FAMILY = Bytes.toBytes(CATALOG_FAMILY_STR);<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /** The RegionInfo qualifier as a string */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>  public static final String REGIONINFO_QUALIFIER_STR = "regioninfo";<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>  /** The regioninfo column qualifier */<a name="line.433"></a>
-<span class="sourceLineNo">434</span>  public static final byte [] REGIONINFO_QUALIFIER = Bytes.toBytes(REGIONINFO_QUALIFIER_STR);<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>  /** The server column qualifier */<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  public static final String SERVER_QUALIFIER_STR = "server";<a name="line.437"></a>
+<span class="sourceLineNo">421</span>  public static final String BASE_NAMESPACE_DIR = "data";<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  /** delimiter used between portions of a region name */<a name="line.423"></a>
+<span class="sourceLineNo">424</span>  public static final int META_ROW_DELIMITER = ',';<a name="line.424"></a>
+<span class="sourceLineNo">425</span><a name="line.425"></a>
+<span class="sourceLineNo">426</span>  /** The catalog family as a string*/<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  public static final String CATALOG_FAMILY_STR = "info";<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  /** The catalog family */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>  public static final byte [] CATALOG_FAMILY = Bytes.toBytes(CATALOG_FAMILY_STR);<a name="line.430"></a>
+<span class="sourceLineNo">431</span><a name="line.431"></a>
+<span class="sourceLineNo">432</span>  /** The RegionInfo qualifier as a string */<a name="line.432"></a>
+<span class="sourceLineNo">433</span>  public static final String REGIONINFO_QUALIFIER_STR = "regioninfo";<a name="line.433"></a>
+<span class="sourceLineNo">434</span><a name="line.434"></a>
+<span class="sourceLineNo">435</span>  /** The regioninfo column qualifier */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  public static final byte [] REGIONINFO_QUALIFIER = Bytes.toBytes(REGIONINFO_QUALIFIER_STR);<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
 <span class="sourceLineNo">438</span>  /** The server column qualifier */<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  public static final byte [] SERVER_QUALIFIER = Bytes.toBytes(SERVER_QUALIFIER_STR);<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /** The startcode column qualifier */<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  public static final String STARTCODE_QUALIFIER_STR = "serverstartcode";<a name="line.442"></a>
+<span class="sourceLineNo">439</span>  public static final String SERVER_QUALIFIER_STR = "server";<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  /** The server column qualifier */<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  public static final byte [] SERVER_QUALIFIER = Bytes.toBytes(SERVER_QUALIFIER_STR);<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
 <span class="sourceLineNo">443</span>  /** The startcode column qualifier */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  public static final byte [] STARTCODE_QUALIFIER = Bytes.toBytes(STARTCODE_QUALIFIER_STR);<a name="line.444"></a>
-<span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  /** The open seqnum column qualifier */<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  public static final String SEQNUM_QUALIFIER_STR = "seqnumDuringOpen";<a name="line.447"></a>
+<span class="sourceLineNo">444</span>  public static final String STARTCODE_QUALIFIER_STR = "serverstartcode";<a name="line.444"></a>
+<span class="sourceLineNo">445</span>  /** The startcode column qualifier */<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  public static final byte [] STARTCODE_QUALIFIER = Bytes.toBytes(STARTCODE_QUALIFIER_STR);<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
 <span class="sourceLineNo">448</span>  /** The open seqnum column qualifier */<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  public static final byte [] SEQNUM_QUALIFIER = Bytes.toBytes(SEQNUM_QUALIFIER_STR);<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>  /** The state column qualifier */<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  public static final String STATE_QUALIFIER_STR = "state";<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static final byte [] STATE_QUALIFIER = Bytes.toBytes(STATE_QUALIFIER_STR);<a name="line.454"></a>
+<span class="sourceLineNo">449</span>  public static final String SEQNUM_QUALIFIER_STR = "seqnumDuringOpen";<a name="line.449"></a>
+<span class="sourceLineNo">450</span>  /** The open seqnum column qualifier */<a name="line.450"></a>
+<span class="sourceLineNo">451</span>  public static final byte [] SEQNUM_QUALIFIER = Bytes.toBytes(SEQNUM_QUALIFIER_STR);<a name="line.451"></a>
+<span class="sourceLineNo">452</span><a name="line.452"></a>
+<span class="sourceLineNo">453</span>  /** The state column qualifier */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  public static final String STATE_QUALIFIER_STR = "state";<a name="line.454"></a>
 <span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
-<span class="sourceLineNo">457</span>   * The serverName column qualifier. Its the server where the region is<a name="line.457"></a>
-<span class="sourceLineNo">458</span>   * transitioning on, while column server is the server where the region is<a name="line.458"></a>
-<span class="sourceLineNo">459</span>   * opened on. They are the same when the region is in state OPEN.<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   */<a name="line.460"></a>
-<span class="sourceLineNo">461</span>  public static final String SERVERNAME_QUALIFIER_STR = "sn";<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>  public static final byte [] SERVERNAME_QUALIFIER = Bytes.toBytes(SERVERNAME_QUALIFIER_STR);<a name="line.463"></a>
+<span class="sourceLineNo">456</span>  public static final byte [] STATE_QUALIFIER = Bytes.toBytes(STATE_QUALIFIER_STR);<a name="line.456"></a>
+<span class="sourceLineNo">457</span><a name="line.457"></a>
+<span class="sourceLineNo">458</span>  /**<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   * The serverName column qualifier. Its the server where the region is<a name="line.459"></a>
+<span class="sourceLineNo">460</span>   * transitioning on, while column server is the server where the region is<a name="line.460"></a>
+<span class="sourceLineNo">461</span>   * opened on. They are the same when the region is in state OPEN.<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
+<span class="sourceLineNo">463</span>  public static final String SERVERNAME_QUALIFIER_STR = "sn";<a name="line.463"></a>
 <span class="sourceLineNo">464</span><a name="line.464"></a>
-<span class="sourceLineNo">465</span>  /** The lower-half split region column qualifier */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>  public static final byte [] SPLITA_QUALIFIER = Bytes.toBytes("splitA");<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>  /** The upper-half split region column qualifier */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static final byte [] SPLITB_QUALIFIER = Bytes.toBytes("splitB");<a name="line.469"></a>
-<span class="sourceLineNo">470</span><a name="line.470"></a>
-<span class="sourceLineNo">471</span>  /** The lower-half merge region column qualifier */<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  public static final byte[] MERGEA_QUALIFIER = Bytes.toBytes("mergeA");<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /** The upper-half merge region column qualifier */<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  public static final byte[] MERGEB_QUALIFIER = Bytes.toBytes("mergeB");<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  /** The catalog family as a string*/<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  public static final String TABLE_FAMILY_STR = "table";<a name="line.478"></a>
-<span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>  /** The catalog family */<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  public static final byte [] TABLE_FAMILY = Bytes.toBytes(TABLE_FAMILY_STR);<a name="line.481"></a>
-<span class="sourceLineNo">482</span><a name="line.482"></a>
-<span class="sourceLineNo">483</span>  /** The serialized table state qualifier */<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public static final byte[] TABLE_STATE_QUALIFIER = Bytes.toBytes("state");<a name="line.484"></a>
-<span class="sourceLineNo">485</span><a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>  /**<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * The meta table version column qualifier.<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * We keep current version of the meta table in this column in &lt;code&gt;-ROOT-&lt;/code&gt;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * table: i.e. in the 'info:v' column.<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   */<a name="line.491"></a>
-<span class="sourceLineNo">492</span>  public static final byte [] META_VERSION_QUALIFIER = Bytes.toBytes("v");<a name="line.492"></a>
-<span class="sourceLineNo">493</span><a name="line.493"></a>
-<span class="sourceLineNo">494</span>  /**<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * The current version of the meta table.<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * - pre-hbase 0.92.  There is no META_VERSION column in the root table<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * in this case. The meta has HTableDescriptor serialized into the HRegionInfo;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * - version 0 is 0.92 and 0.94. Meta data has serialized HRegionInfo's using<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * Writable serialization, and HRegionInfo's does not contain HTableDescriptors.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * - version 1 for 0.96+ keeps HRegionInfo data structures, but changes the<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   * byte[] serialization from Writables to Protobuf.<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * See HRegionInfo.VERSION<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public static final short META_VERSION = 1;<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  // Other constants<a name="line.506"></a>
+<span class="sourceLineNo">465</span>  public static final byte [] SERVERNAME_QUALIFIER = Bytes.toBytes(SERVERNAME_QUALIFIER_STR);<a name="line.465"></a>
+<span class="sourceLineNo">466</span><a name="line.466"></a>
+<span class="sourceLineNo">467</span>  /** The lower-half split region column qualifier */<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  public static final byte [] SPLITA_QUALIFIER = Bytes.toBytes("splitA");<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /** The upper-half split region column qualifier */<a name="line.470"></a>
+<span class="sourceLineNo">471</span>  public static final byte [] SPLITB_QUALIFIER = Bytes.toBytes("splitB");<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  /** The lower-half merge region column qualifier */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  public static final byte[] MERGEA_QUALIFIER = Bytes.toBytes("mergeA");<a name="line.474"></a>
+<span class="sourceLineNo">475</span><a name="line.475"></a>
+<span class="sourceLineNo">476</span>  /** The upper-half merge region column qualifier */<a name="line.476"></a>
+<span class="sourceLineNo">477</span>  public static final byte[] MERGEB_QUALIFIER = Bytes.toBytes("mergeB");<a name="line.477"></a>
+<span class="sourceLineNo">478</span><a name="line.478"></a>
+<span class="sourceLineNo">479</span>  /** The catalog family as a string*/<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  public static final String TABLE_FAMILY_STR = "table";<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>  /** The catalog family */<a name="line.482"></a>
+<span class="sourceLineNo">483</span>  public static final byte [] TABLE_FAMILY = Bytes.toBytes(TABLE_FAMILY_STR);<a name="line.483"></a>
+<span class="sourceLineNo">484</span><a name="line.484"></a>
+<span class="sourceLineNo">485</span>  /** The serialized table state qualifier */<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  public static final byte[] TABLE_STATE_QUALIFIER = Bytes.toBytes("state");<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span><a name="line.488"></a>
+<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   * The meta table version column qualifier.<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * We keep current version of the meta table in this column in &lt;code&gt;-ROOT-&lt;/code&gt;<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * table: i.e. in the 'info:v' column.<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static final byte [] META_VERSION_QUALIFIER = Bytes.toBytes("v");<a name="line.494"></a>
+<span class="sourceLineNo">495</span><a name="line.495"></a>
+<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
+<span class="sourceLineNo">497</span>   * The current version of the meta table.<a name="line.497"></a>
+<span class="sourceLineNo">498</span>   * - pre-hbase 0.92.  There is no META_VERSION column in the root table<a name="line.498"></a>
+<span class="sourceLineNo">499</span>   * in this case. The meta has HTableDescriptor serialized into the HRegionInfo;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>   * - version 0 is 0.92 and 0.94. Meta data has serialized HRegionInfo's using<a name="line.500"></a>
+<span class="sourceLineNo">501</span>   * Writable serialization, and HRegionInfo's does not contain HTableDescriptors.<a name="line.501"></a>
+<span class="sourceLineNo">502</span>   * - version 1 for 0.96+ keeps HRegionInfo data structures, but changes the<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   * byte[] serialization from Writables to Protobuf.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * See HRegionInfo.VERSION<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   */<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  public static final short META_VERSION = 1;<a name="line.506"></a>
 <span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="so

<TRUNCATED>

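For context on the catalog constants visible in the truncated hunk above: CATALOG_FAMILY ("info") and REGIONINFO_QUALIFIER ("regioninfo") name the hbase:meta column that carries each region's serialized HRegionInfo. Below is a minimal sketch of how a client could read that column, assuming the standard org.apache.hadoop.hbase.client API; the class name MetaCatalogScan is illustrative only and is not part of the commit being published here.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class MetaCatalogScan {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table meta = connection.getTable(TableName.META_TABLE_NAME)) {
          // Restrict the scan to the catalog family ("info") and the
          // regioninfo qualifier, both defined in HConstants.
          Scan scan = new Scan();
          scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
          try (ResultScanner scanner = meta.getScanner(scan)) {
            for (Result result : scanner) {
              // Each row of hbase:meta is keyed by a region name.
              System.out.println(Bytes.toStringBinary(result.getRow()));
            }
          }
        }
      }
    }

Against a running cluster this prints one meta row key per region; it is only a usage sketch for the constants listed in the diff, not code from the published site.
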
[23/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index de0d003..ea9c5c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -73,1205 +73,1207 @@
 <span class="sourceLineNo">065</span>  public static final byte[] RPC_HEADER = new byte[] { 'H', 'B', 'a', 's' };<a name="line.65"></a>
 <span class="sourceLineNo">066</span>  public static final byte RPC_CURRENT_VERSION = 0;<a name="line.66"></a>
 <span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  // HFileBlock constants.<a name="line.68"></a>
-<span class="sourceLineNo">069</span><a name="line.69"></a>
-<span class="sourceLineNo">070</span>  /** The size data structures with minor version is 0 */<a name="line.70"></a>
-<span class="sourceLineNo">071</span>  public static final int HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM = MAGIC_LENGTH + 2 * Bytes.SIZEOF_INT<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      + Bytes.SIZEOF_LONG;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  /** The size of a version 2 HFile block header, minor version 1.<a name="line.73"></a>
-<span class="sourceLineNo">074</span>   * There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum<a name="line.74"></a>
-<span class="sourceLineNo">075</span>   * followed by another 4 byte value to store sizeofDataOnDisk.<a name="line.75"></a>
-<span class="sourceLineNo">076</span>   */<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  public static final int HFILEBLOCK_HEADER_SIZE = HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM +<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    Bytes.SIZEOF_BYTE + 2 * Bytes.SIZEOF_INT;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  /** Just an array of bytes of the right size. */<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  public static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HFILEBLOCK_HEADER_SIZE];<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  //End HFileBlockConstants.<a name="line.82"></a>
+<span class="sourceLineNo">068</span>  // HFileBlock constants. TODO!!!! THESE DEFINES BELONG IN HFILEBLOCK, NOT UP HERE.<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  // Needed down in hbase-common though by encoders but these encoders should not be dealing<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  // in the internals of hfileblocks. Fix encapsulation.<a name="line.70"></a>
+<span class="sourceLineNo">071</span><a name="line.71"></a>
+<span class="sourceLineNo">072</span>  /** The size data structures with minor version is 0 */<a name="line.72"></a>
+<span class="sourceLineNo">073</span>  public static final int HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM = MAGIC_LENGTH + 2 * Bytes.SIZEOF_INT<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      + Bytes.SIZEOF_LONG;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  /** The size of a version 2 HFile block header, minor version 1.<a name="line.75"></a>
+<span class="sourceLineNo">076</span>   * There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum<a name="line.76"></a>
+<span class="sourceLineNo">077</span>   * followed by another 4 byte value to store sizeofDataOnDisk.<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   */<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  public static final int HFILEBLOCK_HEADER_SIZE = HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM +<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    Bytes.SIZEOF_BYTE + 2 * Bytes.SIZEOF_INT;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  /** Just an array of bytes of the right size. */<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HFILEBLOCK_HEADER_SIZE];<a name="line.82"></a>
 <span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   * Status codes used for return values of bulk operations.<a name="line.85"></a>
-<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  @InterfaceAudience.Private<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  public enum OperationStatusCode {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    NOT_RUN,<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    SUCCESS,<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    BAD_FAMILY,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    SANITY_CHECK_FAILURE,<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    FAILURE;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  /** long constant for zero */<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  public static final Long ZERO_L = Long.valueOf(0L);<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static final String NINES = "99999999999999";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final String ZEROES = "00000000000000";<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  // For migration<a name="line.101"></a>
+<span class="sourceLineNo">084</span>  //End HFileBlockConstants.<a name="line.84"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>  /**<a name="line.86"></a>
+<span class="sourceLineNo">087</span>   * Status codes used for return values of bulk operations.<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   */<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  @InterfaceAudience.Private<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  public enum OperationStatusCode {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    NOT_RUN,<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    SUCCESS,<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    BAD_FAMILY,<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    SANITY_CHECK_FAILURE,<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    FAILURE;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  }<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  /** long constant for zero */<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  public static final Long ZERO_L = Long.valueOf(0L);<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  public static final String NINES = "99999999999999";<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  public static final String ZEROES = "00000000000000";<a name="line.101"></a>
 <span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  /** name of version file */<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  public static final String VERSION_FILE_NAME = "hbase.version";<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  /**<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * Current version of file system.<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * Version 4 supports only one kind of bloom filter.<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   * Version 5 changes versions in catalog table regions.<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * Version 6 enables blockcaching on catalog tables.<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * Version 7 introduces hfile -- hbase 0.19 to 0.20..<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   * Version 8 introduces namespace<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   */<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  // public static final String FILE_SYSTEM_VERSION = "6";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  public static final String FILE_SYSTEM_VERSION = "8";<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  // Configuration parameters<a name="line.117"></a>
+<span class="sourceLineNo">103</span>  // For migration<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>  /** name of version file */<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  public static final String VERSION_FILE_NAME = "hbase.version";<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>  /**<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * Current version of file system.<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * Version 4 supports only one kind of bloom filter.<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   * Version 5 changes versions in catalog table regions.<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Version 6 enables blockcaching on catalog tables.<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * Version 7 introduces hfile -- hbase 0.19 to 0.20..<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * Version 8 introduces namespace<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   */<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  // public static final String FILE_SYSTEM_VERSION = "6";<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  public static final String FILE_SYSTEM_VERSION = "8";<a name="line.117"></a>
 <span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  //TODO: Is having HBase homed on port 60k OK?<a name="line.119"></a>
+<span class="sourceLineNo">119</span>  // Configuration parameters<a name="line.119"></a>
 <span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  /** Cluster is in distributed mode or not */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  public static final String CLUSTER_DISTRIBUTED = "hbase.cluster.distributed";<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  /** Config for pluggable load balancers */<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  public static final String HBASE_MASTER_LOADBALANCER_CLASS = "hbase.master.loadbalancer.class";<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  /** Config for balancing the cluster by table */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  public static final String HBASE_MASTER_LOADBALANCE_BYTABLE = "hbase.master.loadbalance.bytable";<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /** The name of the ensemble table */<a name="line.130"></a>
-<span class="sourceLineNo">131</span>  public static final String ENSEMBLE_TABLE_NAME = "hbase:ensemble";<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>  /** Config for pluggable region normalizer */<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  public static final String HBASE_MASTER_NORMALIZER_CLASS =<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    "hbase.master.normalizer.class";<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  /** Cluster is standalone or pseudo-distributed */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static final boolean CLUSTER_IS_LOCAL = false;<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>  /** Cluster is fully-distributed */<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  public static final boolean CLUSTER_IS_DISTRIBUTED = true;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>  /** Default value for cluster distributed mode */<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  public static final boolean DEFAULT_CLUSTER_DISTRIBUTED = CLUSTER_IS_LOCAL;<a name="line.144"></a>
-<span class="sourceLineNo">145</span><a name="line.145"></a>
-<span class="sourceLineNo">146</span>  /** default host address */<a name="line.146"></a>
-<span class="sourceLineNo">147</span>  public static final String DEFAULT_HOST = "0.0.0.0";<a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  /** Parameter name for port master listens on. */<a name="line.149"></a>
-<span class="sourceLineNo">150</span>  public static final String MASTER_PORT = "hbase.master.port";<a name="line.150"></a>
-<span class="sourceLineNo">151</span><a name="line.151"></a>
-<span class="sourceLineNo">152</span>  /** default port that the master listens on */<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  public static final int DEFAULT_MASTER_PORT = 16000;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  /** default port for master web api */<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  public static final int DEFAULT_MASTER_INFOPORT = 16010;<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>  /** Configuration key for master web API port */<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  public static final String MASTER_INFO_PORT = "hbase.master.info.port";<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>  /** Parameter name for the master type being backup (waits for primary to go inactive). */<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>  /**<a name="line.164"></a>
-<span class="sourceLineNo">165</span>   * by default every master is a possible primary master unless the conf explicitly overrides it<a name="line.165"></a>
-<span class="sourceLineNo">166</span>   */<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  /** Name of ZooKeeper quorum configuration parameter. */<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  public static final String ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  /** Common prefix of ZooKeeper configuration properties */<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  public static final String ZK_CFG_PROPERTY_PREFIX =<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      "hbase.zookeeper.property.";<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  public static final int ZK_CFG_PROPERTY_PREFIX_LEN =<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      ZK_CFG_PROPERTY_PREFIX.length();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  /**<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   * The ZK client port key in the ZK properties map. The name reflects the<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * fact that this is not an HBase configuration key.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   */<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  public static final String CLIENT_PORT_STR = "clientPort";<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /** Parameter name for the client port that the zookeeper listens on */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  public static final String ZOOKEEPER_CLIENT_PORT =<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      ZK_CFG_PROPERTY_PREFIX + CLIENT_PORT_STR;<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  /** Default client port that the zookeeper listens on */<a name="line.189"></a>
-<span class="sourceLineNo">190</span>  public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;<a name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>  /**<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * Parameter name for the wait time for the recoverable zookeeper<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      "hbase.zookeeper.recoverable.waittime";<a name="line.196"></a>
-<span class="sourceLineNo">197</span><a name="line.197"></a>
-<span class="sourceLineNo">198</span>  /** Default wait time for the recoverable zookeeper */<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  /** Parameter name for the root dir in ZK for this cluster */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  public static final String ZOOKEEPER_ZNODE_PARENT = "zookeeper.znode.parent";<a name="line.202"></a>
-<span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>  public static final String DEFAULT_ZOOKEEPER_ZNODE_PARENT = "/hbase";<a name="line.204"></a>
+<span class="sourceLineNo">121</span>  //TODO: Is having HBase homed on port 60k OK?<a name="line.121"></a>
+<span class="sourceLineNo">122</span><a name="line.122"></a>
+<span class="sourceLineNo">123</span>  /** Cluster is in distributed mode or not */<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  public static final String CLUSTER_DISTRIBUTED = "hbase.cluster.distributed";<a name="line.124"></a>
+<span class="sourceLineNo">125</span><a name="line.125"></a>
+<span class="sourceLineNo">126</span>  /** Config for pluggable load balancers */<a name="line.126"></a>
+<span class="sourceLineNo">127</span>  public static final String HBASE_MASTER_LOADBALANCER_CLASS = "hbase.master.loadbalancer.class";<a name="line.127"></a>
+<span class="sourceLineNo">128</span><a name="line.128"></a>
+<span class="sourceLineNo">129</span>  /** Config for balancing the cluster by table */<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  public static final String HBASE_MASTER_LOADBALANCE_BYTABLE = "hbase.master.loadbalance.bytable";<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /** The name of the ensemble table */<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  public static final String ENSEMBLE_TABLE_NAME = "hbase:ensemble";<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  /** Config for pluggable region normalizer */<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  public static final String HBASE_MASTER_NORMALIZER_CLASS =<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    "hbase.master.normalizer.class";<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  /** Cluster is standalone or pseudo-distributed */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static final boolean CLUSTER_IS_LOCAL = false;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>  /** Cluster is fully-distributed */<a name="line.142"></a>
+<span class="sourceLineNo">143</span>  public static final boolean CLUSTER_IS_DISTRIBUTED = true;<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>  /** Default value for cluster distributed mode */<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  public static final boolean DEFAULT_CLUSTER_DISTRIBUTED = CLUSTER_IS_LOCAL;<a name="line.146"></a>
+<span class="sourceLineNo">147</span><a name="line.147"></a>
+<span class="sourceLineNo">148</span>  /** default host address */<a name="line.148"></a>
+<span class="sourceLineNo">149</span>  public static final String DEFAULT_HOST = "0.0.0.0";<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>  /** Parameter name for port master listens on. */<a name="line.151"></a>
+<span class="sourceLineNo">152</span>  public static final String MASTER_PORT = "hbase.master.port";<a name="line.152"></a>
+<span class="sourceLineNo">153</span><a name="line.153"></a>
+<span class="sourceLineNo">154</span>  /** default port that the master listens on */<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  public static final int DEFAULT_MASTER_PORT = 16000;<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  /** default port for master web api */<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  public static final int DEFAULT_MASTER_INFOPORT = 16010;<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>  /** Configuration key for master web API port */<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  public static final String MASTER_INFO_PORT = "hbase.master.info.port";<a name="line.161"></a>
+<span class="sourceLineNo">162</span><a name="line.162"></a>
+<span class="sourceLineNo">163</span>  /** Parameter name for the master type being backup (waits for primary to go inactive). */<a name="line.163"></a>
+<span class="sourceLineNo">164</span>  public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span>  /**<a name="line.166"></a>
+<span class="sourceLineNo">167</span>   * by default every master is a possible primary master unless the conf explicitly overrides it<a name="line.167"></a>
+<span class="sourceLineNo">168</span>   */<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;<a name="line.169"></a>
+<span class="sourceLineNo">170</span><a name="line.170"></a>
+<span class="sourceLineNo">171</span>  /** Name of ZooKeeper quorum configuration parameter. */<a name="line.171"></a>
+<span class="sourceLineNo">172</span>  public static final String ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";<a name="line.172"></a>
+<span class="sourceLineNo">173</span><a name="line.173"></a>
+<span class="sourceLineNo">174</span>  /** Common prefix of ZooKeeper configuration properties */<a name="line.174"></a>
+<span class="sourceLineNo">175</span>  public static final String ZK_CFG_PROPERTY_PREFIX =<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      "hbase.zookeeper.property.";<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  public static final int ZK_CFG_PROPERTY_PREFIX_LEN =<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      ZK_CFG_PROPERTY_PREFIX.length();<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  /**<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   * The ZK client port key in the ZK properties map. The name reflects the<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   * fact that this is not an HBase configuration key.<a name="line.183"></a>
+<span class="sourceLineNo">184</span>   */<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  public static final String CLIENT_PORT_STR = "clientPort";<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>  /** Parameter name for the client port that the zookeeper listens on */<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  public static final String ZOOKEEPER_CLIENT_PORT =<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      ZK_CFG_PROPERTY_PREFIX + CLIENT_PORT_STR;<a name="line.189"></a>
+<span class="sourceLineNo">190</span><a name="line.190"></a>
+<span class="sourceLineNo">191</span>  /** Default client port that the zookeeper listens on */<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;<a name="line.192"></a>
+<span class="sourceLineNo">193</span><a name="line.193"></a>
+<span class="sourceLineNo">194</span>  /**<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * Parameter name for the wait time for the recoverable zookeeper<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   */<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      "hbase.zookeeper.recoverable.waittime";<a name="line.198"></a>
+<span class="sourceLineNo">199</span><a name="line.199"></a>
+<span class="sourceLineNo">200</span>  /** Default wait time for the recoverable zookeeper */<a name="line.200"></a>
+<span class="sourceLineNo">201</span>  public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>  /** Parameter name for the root dir in ZK for this cluster */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  public static final String ZOOKEEPER_ZNODE_PARENT = "zookeeper.znode.parent";<a name="line.204"></a>
 <span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Parameter name for the limit on concurrent client-side zookeeper<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * connections<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  public static final String ZOOKEEPER_MAX_CLIENT_CNXNS =<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      ZK_CFG_PROPERTY_PREFIX + "maxClientCnxns";<a name="line.211"></a>
-<span class="sourceLineNo">212</span><a name="line.212"></a>
-<span class="sourceLineNo">213</span>  /** Parameter name for the ZK data directory */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  public static final String ZOOKEEPER_DATA_DIR =<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      ZK_CFG_PROPERTY_PREFIX + "dataDir";<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  /** Parameter name for the ZK tick time */<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final String ZOOKEEPER_TICK_TIME =<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      ZK_CFG_PROPERTY_PREFIX + "tickTime";<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>  /** Default limit on concurrent client-side zookeeper connections */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  public static final int DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS = 300;<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /** Configuration key for ZooKeeper session timeout */<a name="line.224"></a>
-<span class="sourceLineNo">225</span>  public static final String ZK_SESSION_TIMEOUT = "zookeeper.session.timeout";<a name="line.225"></a>
-<span class="sourceLineNo">226</span><a name="line.226"></a>
-<span class="sourceLineNo">227</span>  /** Default value for ZooKeeper session timeout */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  public static final int DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  /** Configuration key for whether to use ZK.multi */<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  public static final String ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Parameter name for port region server listens on. */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  public static final String REGIONSERVER_PORT = "hbase.regionserver.port";<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Default port region server listens on. */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  public static final int DEFAULT_REGIONSERVER_PORT = 16020;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** default port for region server web api */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  public static final int DEFAULT_REGIONSERVER_INFOPORT = 16030;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** A configuration key for regionserver info port */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  public static final String REGIONSERVER_INFO_PORT =<a name="line.243"></a>
-<span class="sourceLineNo">244</span>    "hbase.regionserver.info.port";<a name="line.244"></a>
-<span class="sourceLineNo">245</span><a name="line.245"></a>
-<span class="sourceLineNo">246</span>  /** A flag that enables automatic selection of regionserver info port */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static final String REGIONSERVER_INFO_PORT_AUTO =<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      REGIONSERVER_INFO_PORT + ".auto";<a name="line.248"></a>
-<span class="sourceLineNo">249</span><a name="line.249"></a>
-<span class="sourceLineNo">250</span>  /** Parameter name for what region server implementation to use. */<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public static final String REGION_SERVER_IMPL= "hbase.regionserver.impl";<a name="line.251"></a>
-<span class="sourceLineNo">252</span><a name="line.252"></a>
-<span class="sourceLineNo">253</span>  /** Parameter name for what master implementation to use. */<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  public static final String MASTER_IMPL= "hbase.master.impl";<a name="line.254"></a>
-<span class="sourceLineNo">255</span><a name="line.255"></a>
-<span class="sourceLineNo">256</span>  /** Parameter name for what hbase client implementation to use. */<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  public static final String HBASECLIENT_IMPL= "hbase.hbaseclient.impl";<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>  /** Parameter name for how often threads should wake up */<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  public static final String THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  /** Default value for thread wake frequency */<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  public static final int DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;<a name="line.263"></a>
-<span class="sourceLineNo">264</span><a name="line.264"></a>
-<span class="sourceLineNo">265</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>  public static final String VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.268"></a>
-<span class="sourceLineNo">269</span>  public static final int DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>  /** Parameter name for how often a region should should perform a major compaction */<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  public static final String MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>  /** Parameter name for the maximum batch of KVs to be used in flushes and compactions */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  public static final String COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max";<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  public static final int COMPACTION_KV_MAX_DEFAULT = 10;<a name="line.276"></a>
-<span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span>  /** Parameter name for HBase instance root directory */<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  public static final String HBASE_DIR = "hbase.rootdir";<a name="line.279"></a>
-<span class="sourceLineNo">280</span><a name="line.280"></a>
-<span class="sourceLineNo">281</span>  /** Parameter name for HBase client IPC pool type */<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  public static final String HBASE_CLIENT_IPC_POOL_TYPE = "hbase.client.ipc.pool.type";<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /** Parameter name for HBase client IPC pool size */<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  public static final String HBASE_CLIENT_IPC_POOL_SIZE = "hbase.client.ipc.pool.size";<a name="line.285"></a>
-<span class="sourceLineNo">286</span><a name="line.286"></a>
-<span class="sourceLineNo">287</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  public static final String HBASE_CLIENT_OPERATION_TIMEOUT = "hbase.client.operation.timeout";<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  public static final String HBASE_CLIENT_META_OPERATION_TIMEOUT =<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    "hbase.client.meta.operation.timeout";<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  /** Default HBase client operation timeout, which is tantamount to a blocking call */<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  public static final int DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT = 1200000;<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /** Used to construct the name of the log directory for a region server */<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  public static final String HREGION_LOGDIR_NAME = "WALs";<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>  /** Used to construct the name of the splitlog directory for a region server */<a name="line.300"></a>
-<span class="sourceLineNo">301</span>  public static final String SPLIT_LOGDIR_NAME = "splitWAL";<a name="line.301"></a>
-<span class="sourceLineNo">302</span><a name="line.302"></a>
-<span class="sourceLineNo">303</span>  /** Like the previous, but for old logs that are about to be deleted */<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  public static final String HREGION_OLDLOGDIR_NAME = "oldWALs";<a name="line.304"></a>
-<span class="sourceLineNo">305</span><a name="line.305"></a>
-<span class="sourceLineNo">306</span>  public static final String CORRUPT_DIR_NAME = "corrupt";<a name="line.306"></a>
+<span class="sourceLineNo">206</span>  public static final String DEFAULT_ZOOKEEPER_ZNODE_PARENT = "/hbase";<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>  /**<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   * Parameter name for the limit on concurrent client-side zookeeper<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   * connections<a name="line.210"></a>
+<span class="sourceLineNo">211</span>   */<a name="line.211"></a>
+<span class="sourceLineNo">212</span>  public static final String ZOOKEEPER_MAX_CLIENT_CNXNS =<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      ZK_CFG_PROPERTY_PREFIX + "maxClientCnxns";<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>  /** Parameter name for the ZK data directory */<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final String ZOOKEEPER_DATA_DIR =<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      ZK_CFG_PROPERTY_PREFIX + "dataDir";<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>  /** Parameter name for the ZK tick time */<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  public static final String ZOOKEEPER_TICK_TIME =<a name="line.220"></a>
+<span class="sourceLineNo">221</span>      ZK_CFG_PROPERTY_PREFIX + "tickTime";<a name="line.221"></a>
+<span class="sourceLineNo">222</span><a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /** Default limit on concurrent client-side zookeeper connections */<a name="line.223"></a>
+<span class="sourceLineNo">224</span>  public static final int DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS = 300;<a name="line.224"></a>
+<span class="sourceLineNo">225</span><a name="line.225"></a>
+<span class="sourceLineNo">226</span>  /** Configuration key for ZooKeeper session timeout */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  public static final String ZK_SESSION_TIMEOUT = "zookeeper.session.timeout";<a name="line.227"></a>
+<span class="sourceLineNo">228</span><a name="line.228"></a>
+<span class="sourceLineNo">229</span>  /** Default value for ZooKeeper session timeout */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  public static final int DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;<a name="line.230"></a>
+<span class="sourceLineNo">231</span><a name="line.231"></a>
+<span class="sourceLineNo">232</span>  /** Configuration key for whether to use ZK.multi */<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  public static final String ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";<a name="line.233"></a>
+<span class="sourceLineNo">234</span><a name="line.234"></a>
+<span class="sourceLineNo">235</span>  /** Parameter name for port region server listens on. */<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  public static final String REGIONSERVER_PORT = "hbase.regionserver.port";<a name="line.236"></a>
+<span class="sourceLineNo">237</span><a name="line.237"></a>
+<span class="sourceLineNo">238</span>  /** Default port region server listens on. */<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  public static final int DEFAULT_REGIONSERVER_PORT = 16020;<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /** default port for region server web api */<a name="line.241"></a>
+<span class="sourceLineNo">242</span>  public static final int DEFAULT_REGIONSERVER_INFOPORT = 16030;<a name="line.242"></a>
+<span class="sourceLineNo">243</span><a name="line.243"></a>
+<span class="sourceLineNo">244</span>  /** A configuration key for regionserver info port */<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  public static final String REGIONSERVER_INFO_PORT =<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    "hbase.regionserver.info.port";<a name="line.246"></a>
+<span class="sourceLineNo">247</span><a name="line.247"></a>
+<span class="sourceLineNo">248</span>  /** A flag that enables automatic selection of regionserver info port */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public static final String REGIONSERVER_INFO_PORT_AUTO =<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      REGIONSERVER_INFO_PORT + ".auto";<a name="line.250"></a>
+<span class="sourceLineNo">251</span><a name="line.251"></a>
+<span class="sourceLineNo">252</span>  /** Parameter name for what region server implementation to use. */<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  public static final String REGION_SERVER_IMPL= "hbase.regionserver.impl";<a name="line.253"></a>
+<span class="sourceLineNo">254</span><a name="line.254"></a>
+<span class="sourceLineNo">255</span>  /** Parameter name for what master implementation to use. */<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public static final String MASTER_IMPL= "hbase.master.impl";<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  /** Parameter name for what hbase client implementation to use. */<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  public static final String HBASECLIENT_IMPL= "hbase.hbaseclient.impl";<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  /** Parameter name for how often threads should wake up */<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  public static final String THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>  /** Default value for thread wake frequency */<a name="line.264"></a>
+<span class="sourceLineNo">265</span>  public static final int DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.267"></a>
+<span class="sourceLineNo">268</span>  public static final String VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  public static final int DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>  /** Parameter name for how often a region should should perform a major compaction */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  public static final String MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /** Parameter name for the maximum batch of KVs to be used in flushes and compactions */<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  public static final String COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max";<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  public static final int COMPACTION_KV_MAX_DEFAULT = 10;<a name="line.278"></a>
+<span class="sourceLineNo">279</span><a name="line.279"></a>
+<span class="sourceLineNo">280</span>  /** Parameter name for HBase instance root directory */<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  public static final String HBASE_DIR = "hbase.rootdir";<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>  /** Parameter name for HBase client IPC pool type */<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  public static final String HBASE_CLIENT_IPC_POOL_TYPE = "hbase.client.ipc.pool.type";<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  /** Parameter name for HBase client IPC pool size */<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  public static final String HBASE_CLIENT_IPC_POOL_SIZE = "hbase.client.ipc.pool.size";<a name="line.287"></a>
+<span class="sourceLineNo">288</span><a name="line.288"></a>
+<span class="sourceLineNo">289</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public static final String HBASE_CLIENT_OPERATION_TIMEOUT = "hbase.client.operation.timeout";<a name="line.290"></a>
+<span class="sourceLineNo">291</span><a name="line.291"></a>
+<span class="sourceLineNo">292</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  public static final String HBASE_CLIENT_META_OPERATION_TIMEOUT =<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    "hbase.client.meta.operation.timeout";<a name="line.294"></a>
+<span class="sourceLineNo">295</span><a name="line.295"></a>
+<span class="sourceLineNo">296</span>  /** Default HBase client operation timeout, which is tantamount to a blocking call */<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  public static final int DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT = 1200000;<a name="line.297"></a>
+<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">299</span>  /** Used to construct the name of the log directory for a region server */<a name="line.299"></a>
+<span class="sourceLineNo">300</span>  public static final String HREGION_LOGDIR_NAME = "WALs";<a name="line.300"></a>
+<span class="sourceLineNo">301</span><a name="line.301"></a>
+<span class="sourceLineNo">302</span>  /** Used to construct the name of the splitlog directory for a region server */<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  public static final String SPLIT_LOGDIR_NAME = "splitWAL";<a name="line.303"></a>
+<span class="sourceLineNo">304</span><a name="line.304"></a>
+<span class="sourceLineNo">305</span>  /** Like the previous, but for old logs that are about to be deleted */<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  public static final String HREGION_OLDLOGDIR_NAME = "oldWALs";<a name="line.306"></a>
 <span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  /** Used by HBCK to sideline backup data */<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  public static final String HBCK_SIDELINEDIR_NAME = ".hbck";<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>  /** Any artifacts left from migration can be moved here */<a name="line.311"></a>
-<span class="sourceLineNo">312</span>  public static final String MIGRATION_NAME = ".migration";<a name="line.312"></a>
-<span class="sourceLineNo">313</span><a name="line.313"></a>
-<span class="sourceLineNo">314</span>  /**<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * The directory from which co-processor/custom filter jars can be loaded<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * dynamically by the region servers. This value can be overridden by the<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   * hbase.dynamic.jars.dir config.<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  public static final String LIB_DIR = "lib";<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  /** Used to construct the name of the compaction directory during compaction */<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  public static final String HREGION_COMPACTIONDIR_NAME = "compaction.dir";<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /** Conf key for the max file size after which we split the region */<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  public static final String HREGION_MAX_FILESIZE =<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      "hbase.hregion.max.filesize";<a name="line.326"></a>
-<span class="sourceLineNo">327</span><a name="line.327"></a>
-<span class="sourceLineNo">328</span>  /** Default maximum file size */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  public static final long DEFAULT_MAX_FILE_SIZE = 10 * 1024 * 1024 * 1024L;<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  /**<a name="line.331"></a>
-<span class="sourceLineNo">332</span>   * Max size of single row for Get's or Scan's without in-row scanning flag set.<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   */<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  public static final String TABLE_MAX_ROWSIZE_KEY = "hbase.table.max.rowsize";<a name="line.334"></a>
-<span class="sourceLineNo">335</span><a name="line.335"></a>
-<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * Default max row size (1 Gb).<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   */<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  public static final long TABLE_MAX_ROWSIZE_DEFAULT = 1024 * 1024 * 1024L;<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * The max number of threads used for opening and closing stores or store<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * files in parallel<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  public static final String HSTORE_OPEN_AND_CLOSE_THREADS_MAX =<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    "hbase.hstore.open.and.close.threads.max";<a name="line.346"></a>
-<span class="sourceLineNo">347</span><a name="line.347"></a>
-<span class="sourceLineNo">348</span>  /**<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   * The default number for the max number of threads used for opening and<a name="line.349"></a>
-<span class="sourceLineNo">350</span>   * closing stores or store files in parallel<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   */<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  public static final int DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX = 1;<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>  /**<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * Block updates if memstore has hbase.hregion.memstore.block.multiplier<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   * times hbase.hregion.memstore.flush.size bytes.  Useful preventing<a name="line.356"></a>
-<span class="sourceLineNo">357</span>   * runaway memstore during spikes in update traffic.<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   */<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  public static final String HREGION_MEMSTORE_BLOCK_MULTIPLIER =<a name="line.359"></a>
-<span class="sourceLineNo">360</span>          "hbase.hregion.memstore.block.multiplier";<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * Default value for hbase.hregion.memstore.block.multiplier<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  public static final int DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER = 4;<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  /** Conf key for the memstore size at which we flush the memstore */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public static final String HREGION_MEMSTORE_FLUSH_SIZE =<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      "hbase.hregion.memstore.flush.size";<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>  public static final String HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      "hbase.hregion.edits.replay.skip.errors";<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  public static final boolean DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      false;<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  /** Maximum value length, enforced on KeyValue construction */<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public static final int MAXIMUM_VALUE_LENGTH = Integer.MAX_VALUE - 1;<a name="line.378"></a>
-<span class="sourceLineNo">379</span><a name="line.379"></a>
-<span class="sourceLineNo">380</span>  /** name of the file for unique cluster ID */<a name="line.380"></a>
-<span class="sourceLineNo">381</span>  public static final String CLUSTER_ID_FILE_NAME = "hbase.id";<a name="line.381"></a>
-<span class="sourceLineNo">382</span><a name="line.382"></a>
-<span class="sourceLineNo">383</span>  /** Default value for cluster ID */<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  public static final String CLUSTER_ID_DEFAULT = "default-cluster";<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /** Parameter name for # days to keep MVCC values during a major compaction */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  public static final String KEEP_SEQID_PERIOD = "hbase.hstore.compaction.keep.seqId.period";<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  /** At least to keep MVCC values in hfiles for 5 days */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static final int MIN_KEEP_SEQID_PERIOD = 5;<a name="line.389"></a>
-<span class="sourceLineNo">390</span><a name="line.390"></a>
-<span class="sourceLineNo">391</span>  // Always store the location of the root table's HRegion.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  // This HRegion is never split.<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>  // region name = table + startkey + regionid. This is the row key.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>  // each row in the root and meta tables describes exactly 1 region<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  // Do we ever need to know all the information that we are storing?<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  // Note that the name of the root table starts with "-" and the name of the<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  // meta table starts with "." Why? it's a trick. It turns out that when we<a name="line.399"></a>
-<span class="sourceLineNo">400</span>  // store region names in memory, we use a SortedMap. Since "-" sorts before<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  // "." (and since no other table name can start with either of these<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  // characters, the root region will always be the first entry in such a Map,<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  // followed by all the meta regions (which will be ordered by their starting<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  // row key as well), followed by all user tables. So when the Master is<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  // choosing regions to assign, it will always choose the root region first,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  // followed by the meta regions, followed by user regions. Since the root<a name="line.406"></a>
-<span class="sourceLineNo">407</span>  // and meta regions always need to be on-line, this ensures that they will<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  // be the first to be reassigned if the server(s) they are being served by<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  // should go down.<a name="line.409"></a>
-<span class="sourceLineNo">410</span><a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>  /**<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * The hbase:meta table's name.<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @deprecated For upgrades of 0.94 to 0.96<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  @Deprecated  // for compat from 0.94 -&gt; 0.96.<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  public static final String BASE_NAMESPACE_DIR = "data";<a name="line.419"></a>
+<span class="sourceLineNo">308</span>  public static final String CORRUPT_DIR_NAME = "corrupt";<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  /** Used by HBCK to sideline backup data */<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  public static final String HBCK_SIDELINEDIR_NAME = ".hbck";<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /** Any artifacts left from migration can be moved here */<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  public static final String MIGRATION_NAME = ".migration";<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>  /**<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   * The directory from which co-processor/custom filter jars can be loaded<a name="line.317"></a>
+<span class="sourceLineNo">318</span>   * dynamically by the region servers. This value can be overridden by the<a name="line.318"></a>
+<span class="sourceLineNo">319</span>   * hbase.dynamic.jars.dir config.<a name="line.319"></a>
+<span class="sourceLineNo">320</span>   */<a name="line.320"></a>
+<span class="sourceLineNo">321</span>  public static final String LIB_DIR = "lib";<a name="line.321"></a>
+<span class="sourceLineNo">322</span><a name="line.322"></a>
+<span class="sourceLineNo">323</span>  /** Used to construct the name of the compaction directory during compaction */<a name="line.323"></a>
+<span class="sourceLineNo">324</span>  public static final String HREGION_COMPACTIONDIR_NAME = "compaction.dir";<a name="line.324"></a>
+<span class="sourceLineNo">325</span><a name="line.325"></a>
+<span class="sourceLineNo">326</span>  /** Conf key for the max file size after which we split the region */<a name="line.326"></a>
+<span class="sourceLineNo">327</span>  public static final String HREGION_MAX_FILESIZE =<a name="line.327"></a>
+<span class="sourceLineNo">328</span>      "hbase.hregion.max.filesize";<a name="line.328"></a>
+<span class="sourceLineNo">329</span><a name="line.329"></a>
+<span class="sourceLineNo">330</span>  /** Default maximum file size */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  public static final long DEFAULT_MAX_FILE_SIZE = 10 * 1024 * 1024 * 1024L;<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * Max size of single row for Get's or Scan's without in-row scanning flag set.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  public static final String TABLE_MAX_ROWSIZE_KEY = "hbase.table.max.rowsize";<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  /**<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * Default max row size (1 Gb).<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   */<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  public static final long TABLE_MAX_ROWSIZE_DEFAULT = 1024 * 1024 * 1024L;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  /**<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * The max number of threads used for opening and closing stores or store<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * files in parallel<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  public static final String HSTORE_OPEN_AND_CLOSE_THREADS_MAX =<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    "hbase.hstore.open.and.close.threads.max";<a name="line.348"></a>
+<span class="sourceLineNo">349</span><a name="line.349"></a>
+<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   * The default number for the max number of threads used for opening and<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * closing stores or store files in parallel<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   */<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  public static final int DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX = 1;<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>  /**<a name="line.356"></a>
+<span class="sourceLineNo">357</span>   * Block updates if memstore has hbase.hregion.memstore.block.multiplier<a name="line.357"></a>
+<span class="sourceLineNo">358</span>   * times hbase.hregion.memstore.flush.size bytes.  Useful preventing<a name="line.358"></a>
+<span class="sourceLineNo">359</span>   * runaway memstore during spikes in update traffic.<a name="line.359"></a>
+<span class="sourceLineNo">360</span>   */<a name="line.360"></a>
+<span class="sourceLineNo">361</span>  public static final String HREGION_MEMSTORE_BLOCK_MULTIPLIER =<a name="line.361"></a>
+<span class="sourceLineNo">362</span>          "hbase.hregion.memstore.block.multiplier";<a name="line.362"></a>
+<span class="sourceLineNo">363</span><a name="line.363"></a>
+<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * Default value for hbase.hregion.memstore.block.multiplier<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
+<span class="sourceLineNo">367</span>  public static final int DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER = 4;<a name="line.367"></a>
+<span class="sourceLineNo">368</span><a name="line.368"></a>
+<span class="sourceLineNo">369</span>  /** Conf key for the memstore size at which we flush the memstore */<a name="line.369"></a>
+<span class="sourceLineNo">370</span>  public static final String HREGION_MEMSTORE_FLUSH_SIZE =<a name="line.370"></a>
+<span class="sourceLineNo">371</span>      "hbase.hregion.memstore.flush.size";<a name="line.371"></a>
+<span class="sourceLineNo">372</span><a name="line.372"></a>
+<span class="sourceLineNo">373</span>  public static final String HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      "hbase.hregion.edits.replay.skip.errors";<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  public static final boolean DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      false;<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>  /** Maximum value length, enforced on KeyValue construction */<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  public static final int MAXIMUM_VALUE_LENGTH = Integer.MAX_VALUE - 1;<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /** name of the file for unique cluster ID */<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  public static final String CLUSTER_ID_FILE_NAME = "hbase.id";<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /** Default value for cluster ID */<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  public static final String CLUSTER_ID_DEFAULT = "default-cluster";<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /** Parameter name for # days to keep MVCC values during a major compaction */<a name="line.388"></a>
+<span class="sourceLineNo">389</span>  public static final String KEEP_SEQID_PERIOD = "hbase.hstore.compaction.keep.seqId.period";<a name="line.389"></a>
+<span class="sourceLineNo">390</span>  /** At least to keep MVCC values in hfiles for 5 days */<a name="line.390"></a>
+<span class="sourceLineNo">391</span>  public static final int MIN_KEEP_SEQID_PERIOD = 5;<a name="line.391"></a>
+<span class="sourceLineNo">392</span><a name="line.392"></a>
+<span class="sourceLineNo">393</span>  // Always store the location of the root table's HRegion.<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  // This HRegion is never split.<a name="line.394"></a>
+<span class="sourceLineNo">395</span><a name="line.395"></a>
+<span class="sourceLineNo">396</span>  // region name = table + startkey + regionid. This is the row key.<a name="line.396"></a>
+<span class="sourceLineNo">397</span>  // each row in the root and meta tables describes exactly 1 region<a name="line.397"></a>
+<span class="sourceLineNo">398</span>  // Do we ever need to know all the information that we are storing?<a name="line.398"></a>
+<span class="sourceLineNo">399</span><a name="line.399"></a>
+<span class="sourceLineNo">400</span>  // Note that the name of the root table starts with "-" and the name of the<a name="line.400"></a>
+<span class="sourceLineNo">401</span>  // meta table starts with "." Why? it's a trick. It turns out that when we<a name="line.401"></a>
+<span class="sourceLineNo">402</span>  // store region names in memory, we use a SortedMap. Since "-" sorts before<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  // "." (and since no other table name can start with either of these<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  // characters, the root region will always be the first entry in such a Map,<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  // followed by all the meta regions (which will be ordered by their starting<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  // row key as well), followed by all user tables. So when the Master is<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  // choosing regions to assign, it will always choose the root region first,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  // followed by the meta regions, followed by user regions. Since the root<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  // and meta regions always need to be on-line, this ensures that they will<a name="line.409"></a>
+<span class="sourceLineNo">410</span>  // be the first to be reassigned if the server(s) they are being served by<a name="line.410"></a>
+<span class="sourceLineNo">411</span>  // should go down.<a name="line.411"></a>
+<span class="sourceLineNo">412</span><a name="line.412"></a>
+<span class="sourceLineNo">413</span><a name="line.413"></a>
+<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * The hbase:meta table's name.<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   * @deprecated For upgrades of 0.94 to 0.96<a name="line.416"></a>
+<span class="sourceLineNo">417</span>   */<a name="line.417"></a>
+<span class="sourceLineNo">418</span>  @Deprecated  // for compat from 0.94 -&gt; 0.96.<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();<a name="line.419"></a>
 <span class="sourceLineNo">420</span><a name="line.420"></a>
-<span class="sourceLineNo">421</span>  /** delimiter used between portions of a region name */<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  public static final int META_ROW_DELIMITER = ',';<a name="line.422"></a>
-<span class="sourceLineNo">423</span><a name="line.423"></a>
-<span class="sourceLineNo">424</span>  /** The catalog family as a string*/<a name="line.424"></a>
-<span class="sourceLineNo">425</span>  public static final String CATALOG_FAMILY_STR = "info";<a name="line.425"></a>
-<span class="sourceLineNo">426</span><a name="line.426"></a>
-<span class="sourceLineNo">427</span>  /** The catalog family */<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  public static final byte [] CATALOG_FAMILY = Bytes.toBytes(CATALOG_FAMILY_STR);<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /** The RegionInfo qualifier as a string */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>  public static final String REGIONINFO_QUALIFIER_STR = "regioninfo";<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>  /** The regioninfo column qualifier */<a name="line.433"></a>
-<span class="sourceLineNo">434</span>  public static final byte [] REGIONINFO_QUALIFIER = Bytes.toBytes(REGIONINFO_QUALIFIER_STR);<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>  /** The server column qualifier */<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  public static final String SERVER_QUALIFIER_STR = "server";<a name="line.437"></a>
+<span class="sourceLineNo">421</span>  public static final String BASE_NAMESPACE_DIR = "data";<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  /** delimiter used between portions of a region name */<a name="line.423"></a>
+<span class="sourceLineNo">424</span>  public static final int META_ROW_DELIMITER = ',';<a name="line.424"></a>
+<span class="sourceLineNo">425</span><a name="line.425"></a>
+<span class="sourceLineNo">426</span>  /** The catalog family as a string*/<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  public static final String CATALOG_FAMILY_STR = "info";<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  /** The catalog family */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>  public static final byte [] CATALOG_FAMILY = Bytes.toBytes(CATALOG_FAMILY_STR);<a name="line.430"></a>
+<span class="sourceLineNo">431</span><a name="line.431"></a>
+<span class="sourceLineNo">432</span>  /** The RegionInfo qualifier as a string */<a name="line.432"></a>
+<span class="sourceLineNo">433</span>  public static final String REGIONINFO_QUALIFIER_STR = "regioninfo";<a name="line.433"></a>
+<span class="sourceLineNo">434</span><a name="line.434"></a>
+<span class="sourceLineNo">435</span>  /** The regioninfo column qualifier */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  public static final byte [] REGIONINFO_QUALIFIER = Bytes.toBytes(REGIONINFO_QUALIFIER_STR);<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
 <span class="sourceLineNo">438</span>  /** The server column qualifier */<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  public static final byte [] SERVER_QUALIFIER = Bytes.toBytes(SERVER_QUALIFIER_STR);<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /** The startcode column qualifier */<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  public static final String STARTCODE_QUALIFIER_STR = "serverstartcode";<a name="line.442"></a>
+<span class="sourceLineNo">439</span>  public static final String SERVER_QUALIFIER_STR = "server";<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  /** The server column qualifier */<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  public static final byte [] SERVER_QUALIFIER = Bytes.toBytes(SERVER_QUALIFIER_STR);<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
 <span class="sourceLineNo">443</span>  /** The startcode column qualifier */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  public static final byte [] STARTCODE_QUALIFIER = Bytes.toBytes(STARTCODE_QUALIFIER_STR);<a name="line.444"></a>
-<span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  /** The open seqnum column qualifier */<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  public static final String SEQNUM_QUALIFIER_STR = "seqnumDuringOpen";<a name="line.447"></a>
+<span class="sourceLineNo">444</span>  public static final String STARTCODE_QUALIFIER_STR = "serverstartcode";<a name="line.444"></a>
+<span class="sourceLineNo">445</span>  /** The startcode column qualifier */<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  public static final byte [] STARTCODE_QUALIFIER = Bytes.toBytes(STARTCODE_QUALIFIER_STR);<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
 <span class="sourceLineNo">448</span>  /** The open seqnum column qualifier */<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  public static final byte [] SEQNUM_QUALIFIER = Bytes.toBytes(SEQNUM_QUALIFIER_STR);<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>  /** The state column qualifier */<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  public static final String STATE_QUALIFIER_STR = "state";<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static final byte [] STATE_QUALIFIER = Bytes.toBytes(STATE_QUALIFIER_STR);<a name="line.454"></a>
+<span class="sourceLineNo">449</span>  public static final String SEQNUM_QUALIFIER_STR = "seqnumDuringOpen";<a name="line.449"></a>
+<span class="sourceLineNo">450</span>  /** The open seqnum column qualifier */<a name="line.450"></a>
+<span class="sourceLineNo">451</span>  public static final byte [] SEQNUM_QUALIFIER = Bytes.toBytes(SEQNUM_QUALIFIER_STR);<a name="line.451"></a>
+<span class="sourceLineNo">452</span><a name="line.452"></a>
+<span class="sourceLineNo">453</span>  /** The state column qualifier */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  public static final String STATE_QUALIFIER_STR = "state";<a name="line.454"></a>
 <span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
-<span class="sourceLineNo">457</span>   * The serverName column qualifier. Its the server where the region is<a name="line.457"></a>
-<span class="sourceLineNo">458</span>   * transitioning on, while column server is the server where the region is<a name="line.458"></a>
-<span class="sourceLineNo">459</span>   * opened on. They are the same when the region is in state OPEN.<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   */<a name="line.460"></a>
-<span class="sourceLineNo">461</span>  public static final String SERVERNAME_QUALIFIER_STR = "sn";<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>  public static final byte [] SERVERNAME_QUALIFIER = Bytes.toBytes(SERVERNAME_QUALIFIER_STR);<a name="line.463"></a>
+<span class="sourceLineNo">456</span>  public static final byte [] STATE_QUALIFIER = Bytes.toBytes(STATE_QUALIFIER_STR);<a name="line.456"></a>
+<span class="sourceLineNo">457</span><a name="line.457"></a>
+<span class="sourceLineNo">458</span>  /**<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   * The serverName column qualifier. Its the server where the region is<a name="line.459"></a>
+<span class="sourceLineNo">460</span>   * transitioning on, while column server is the server where the region is<a name="line.460"></a>
+<span class="sourceLineNo">461</span>   * opened on. They are the same when the region is in state OPEN.<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
+<span class="sourceLineNo">463</span>  public static final String SERVERNAME_QUALIFIER_STR = "sn";<a name="line.463"></a>
 <span class="sourceLineNo">464</span><a name="line.464"></a>
-<span class="sourceLineNo">465</span>  /** The lower-half split region column qualifier */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>  public static final byte [] SPLITA_QUALIFIER = Bytes.toBytes("splitA");<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>  /** The upper-half split region column qualifier */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static final byte [] SPLITB_QUALIFIER = Bytes.toBytes("splitB");<a name="line.469"></a>
-<span class="sourceLineNo">470</span><a name="line.470"></a>
-<span class="sourceLineNo">471</span>  /** The lower-half merge region column qualifier */<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  public static final byte[] MERGEA_QUALIFIER = Bytes.toBytes("mergeA");<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /** The upper-half merge region column qualifier */<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  public static final byte[] MERGEB_QUALIFIER = Bytes.toBytes("mergeB");<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  /** The catalog family as a string*/<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  public static final String TABLE_FAMILY_STR = "table";<a name="line.478"></a>
-<span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>  /** The catalog family */<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  public static final byte [] TABLE_FAMILY = Bytes.toBytes(TABLE_FAMILY_STR);<a name="line.481"></a>
-<span class="sourceLineNo">482</span><a name="line.482"></a>
-<span class="sourceLineNo">483</span>  /** The serialized table state qualifier */<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public static final byte[] TABLE_STATE_QUALIFIER = Bytes.toBytes("state");<a name="line.484"></a>
-<span class="sourceLineNo">485</span><a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>  /**<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * The meta table version column qualifier.<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * We keep current version of the meta table in this column in &lt;code&gt;-ROOT-&lt;/code&gt;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * table: i.e. in the 'info:v' column.<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   */<a name="line.491"></a>
-<span class="sourceLineNo">492</span>  public static final byte [] META_VERSION_QUALIFIER = Bytes.toBytes("v");<a name="line.492"></a>
-<span class="sourceLineNo">493</span><a name="line.493"></a>
-<span class="sourceLineNo">494</span>  /**<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * The current version of the meta table.<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * - pre-hbase 0.92.  There is no META_VERSION column in the root table<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * in this case. The meta has HTableDescriptor serialized into the HRegionInfo;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * - version 0 is 0.92 and 0.94. Meta data has serialized HRegionInfo's using<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * Writable serialization, and HRegionInfo's does not contain HTableDescriptors.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * - version 1 for 0.96+ keeps HRegionInfo data structures, but changes the<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   * byte[] serialization from Writables to Protobuf.<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * See HRegionInfo.VERSION<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public static final short META_VERSION = 1;<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  // Other constants<a name="line.506"></a>
+<span class="sourceLineNo">465</span>  public static final byte [] SERVERNAME_QUALIFIER = Bytes.toBytes(SERVERNAME_QUALIFIER_STR);<a name="line.465"></a>
+<span class="sourceLineNo">466</span><a name="line.466"></a>
+<span class="sourceLineNo">467</span>  /** The lower-half split region column qualifier */<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  public static final byte [] SPLITA_QUALIFIER = Bytes.toBytes("splitA");<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /** The upper-half split region column qualifier */<a name="line.470"></a>
+<span class="sourceLineNo">471</span>  public static final byte [] SPLITB_QUALIFIER = Bytes.toBytes("splitB");<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  /** The lower-half merge region column qualifier */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  public static final byte[] MERGEA_QUALIFIER = Bytes.toBytes("mergeA");<a name="line.474"></a>
+<span class="sourceLineNo">475</span><a name="line.475"></a>
+<span class="sourceLineNo">476</span>  /** The upper-half merge region column qualifier */<a name="line.476"></a>
+<span class="sourceLineNo">477</span>  public static final byte[] MERGEB_QUALIFIER = Bytes.toBytes("mergeB");<a name="line.477"></a>
+<span class="sourceLineNo">478</span><a name="line.478"></a>
+<span class="sourceLineNo">479</span>  /** The catalog family as a string*/<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  public static final String TABLE_FAMILY_STR = "table";<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>  /** The catalog family */<a name="line.482"></a>
+<span class="sourceLineNo">483</span>  public static final byte [] TABLE_FAMILY = Bytes.toBytes(TABLE_FAMILY_STR);<a name="line.483"></a>
+<span class="sourceLineNo">484</span><a name="line.484"></a>
+<span class="sourceLineNo">485</span>  /** The serialized table state qualifier */<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  public static final byte[] TABLE_STATE_QUALIFIER = Bytes.toBytes("state");<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span><a name="line.488"></a>
+<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   * The meta table version column qualifier.<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * We keep current version of the meta table in this column in &lt;code&gt;-ROOT-&lt;/code&gt;<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * table: i.e. in the 'info:v' column.<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static final byte [] META_VERSION_QUALIFIER = Bytes.toBytes("v");<a name="line.494"></a>
+<span class="sourceLineNo">495</span><a name="line.495"></a>
+<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
+<span class="sourceLineNo">497</span>   * The current version of the meta table.<a name="line.497"></a>
+<span class="sourceLineNo">498</span>   * - pre-hbase 0.92.  There is no META_VERSION column in the root table<a name="line.498"></a>
+<span class="sourceLineNo">499</span>   * in this case. The meta has HTableDescriptor serialized into the HRegionInfo;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>   * - version 0 is 0.92 and 0.94. Meta data has serialized HRegionInfo's using<a name="line.500"></a>
+<span class="sourceLineNo">501</span>   * Writable serialization, and HRegionInfo's does not contain HTableDescriptors.<a name="line.501"></a>
+<span class="sourceLineNo">502</span>   * - version 1 for 0.96+ keeps HRegionInfo data structures, but changes the<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   * byte[] serialization from Writables to Protobuf.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * See HRegionInfo.VERSION<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   */<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  public static final short META_VERSION = 1;<a name="line.506"></a>
 <span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>  /**<a name=

<TRUNCATED>

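As an aside to the HConstants listing above: the snippet below is a minimal, illustrative sketch of how these configuration keys are typically read through a Hadoop Configuration. The key names and defaults used here (ZK_SESSION_TIMEOUT, DEFAULT_ZK_SESSION_TIMEOUT, HREGION_MEMSTORE_FLUSH_SIZE, HREGION_MAX_FILESIZE, DEFAULT_MAX_FILE_SIZE) match the constants shown in the diff; the wrapper class, the 128 MB fallback for the flush size, and the printed output are assumptions for illustration only and are not part of the published site.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;

    // Hypothetical example class, not part of the commit above.
    public class ConfigKeySketch {
      public static void main(String[] args) {
        // HBaseConfiguration layers hbase-default.xml / hbase-site.xml on top of Hadoop defaults.
        Configuration conf = HBaseConfiguration.create();

        // "zookeeper.session.timeout", falling back to the 180s default shown in the listing.
        int zkSessionTimeout = conf.getInt(HConstants.ZK_SESSION_TIMEOUT,
            HConstants.DEFAULT_ZK_SESSION_TIMEOUT);

        // "hbase.hregion.memstore.flush.size"; the 128 MB fallback here is an assumption,
        // the real default is supplied by hbase-default.xml.
        long memstoreFlushSize = conf.getLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE,
            128 * 1024 * 1024L);

        // "hbase.hregion.max.filesize", falling back to DEFAULT_MAX_FILE_SIZE (10 GB).
        long maxFileSize = conf.getLong(HConstants.HREGION_MAX_FILESIZE,
            HConstants.DEFAULT_MAX_FILE_SIZE);

        System.out.println("zk session timeout (ms): " + zkSessionTimeout);
        System.out.println("memstore flush size (bytes): " + memstoreFlushSize);
        System.out.println("region max file size (bytes): " + maxFileSize);
      }
    }
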
[26/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
index 8fcc9fb..611ae09 100644
--- a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
+++ b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html
@@ -279,76 +279,76 @@ extends org.jamon.AbstractTemplateProxy.ImplData</pre>
 <pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.71">m_regionServer</a></pre>
 </li>
 </ul>
-<a name="m_format">
+<a name="m_filter">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_format</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.83">m_format</a></pre>
+<h4>m_filter</h4>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.83">m_filter</a></pre>
 </li>
 </ul>
-<a name="m_format__IsNotDefault">
+<a name="m_filter__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_format__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.88">m_format__IsNotDefault</a></pre>
+<h4>m_filter__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.88">m_filter__IsNotDefault</a></pre>
 </li>
 </ul>
-<a name="m_bcv">
+<a name="m_bcn">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_bcv</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.100">m_bcv</a></pre>
+<h4>m_bcn</h4>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.100">m_bcn</a></pre>
 </li>
 </ul>
-<a name="m_bcv__IsNotDefault">
+<a name="m_bcn__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_bcv__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.105">m_bcv__IsNotDefault</a></pre>
+<h4>m_bcn__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.105">m_bcn__IsNotDefault</a></pre>
 </li>
 </ul>
-<a name="m_bcn">
+<a name="m_format">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_bcn</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.117">m_bcn</a></pre>
+<h4>m_format</h4>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.117">m_format</a></pre>
 </li>
 </ul>
-<a name="m_bcn__IsNotDefault">
+<a name="m_format__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_bcn__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.122">m_bcn__IsNotDefault</a></pre>
+<h4>m_format__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.122">m_format__IsNotDefault</a></pre>
 </li>
 </ul>
-<a name="m_filter">
+<a name="m_bcv">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_filter</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.134">m_filter</a></pre>
+<h4>m_bcv</h4>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.134">m_bcv</a></pre>
 </li>
 </ul>
-<a name="m_filter__IsNotDefault">
+<a name="m_bcv__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockListLast">
 <li class="blockList">
-<h4>m_filter__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.139">m_filter__IsNotDefault</a></pre>
+<h4>m_bcv__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.139">m_bcv__IsNotDefault</a></pre>
 </li>
 </ul>
 </li>
@@ -394,112 +394,112 @@ extends org.jamon.AbstractTemplateProxy.ImplData</pre>
 <pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.67">getRegionServer</a>()</pre>
 </li>
 </ul>
-<a name="setFormat(java.lang.String)">
+<a name="setFilter(java.lang.String)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setFormat</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.73">setFormat</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;format)</pre>
+<h4>setFilter</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.73">setFilter</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;filter)</pre>
 </li>
 </ul>
-<a name="getFormat()">
+<a name="getFilter()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getFormat</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.79">getFormat</a>()</pre>
+<h4>getFilter</h4>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.79">getFilter</a>()</pre>
 </li>
 </ul>
-<a name="getFormat__IsNotDefault()">
+<a name="getFilter__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getFormat__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.84">getFormat__IsNotDefault</a>()</pre>
+<h4>getFilter__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.84">getFilter__IsNotDefault</a>()</pre>
 </li>
 </ul>
-<a name="setBcv(java.lang.String)">
+<a name="setBcn(java.lang.String)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setBcv</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.90">setBcv</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;bcv)</pre>
+<h4>setBcn</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.90">setBcn</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;bcn)</pre>
 </li>
 </ul>
-<a name="getBcv()">
+<a name="getBcn()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getBcv</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.96">getBcv</a>()</pre>
+<h4>getBcn</h4>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.96">getBcn</a>()</pre>
 </li>
 </ul>
-<a name="getBcv__IsNotDefault()">
+<a name="getBcn__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getBcv__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.101">getBcv__IsNotDefault</a>()</pre>
+<h4>getBcn__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.101">getBcn__IsNotDefault</a>()</pre>
 </li>
 </ul>
-<a name="setBcn(java.lang.String)">
+<a name="setFormat(java.lang.String)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setBcn</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.107">setBcn</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;bcn)</pre>
+<h4>setFormat</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.107">setFormat</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;format)</pre>
 </li>
 </ul>
-<a name="getBcn()">
+<a name="getFormat()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getBcn</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.113">getBcn</a>()</pre>
+<h4>getFormat</h4>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.113">getFormat</a>()</pre>
 </li>
 </ul>
-<a name="getBcn__IsNotDefault()">
+<a name="getFormat__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getBcn__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.118">getBcn__IsNotDefault</a>()</pre>
+<h4>getFormat__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.118">getFormat__IsNotDefault</a>()</pre>
 </li>
 </ul>
-<a name="setFilter(java.lang.String)">
+<a name="setBcv(java.lang.String)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setFilter</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.124">setFilter</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;filter)</pre>
+<h4>setBcv</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.124">setBcv</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;bcv)</pre>
 </li>
 </ul>
-<a name="getFilter()">
+<a name="getBcv()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getFilter</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.130">getFilter</a>()</pre>
+<h4>getBcv</h4>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.130">getBcv</a>()</pre>
 </li>
 </ul>
-<a name="getFilter__IsNotDefault()">
+<a name="getBcv__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockListLast">
 <li class="blockList">
-<h4>getFilter__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.135">getFilter__IsNotDefault</a>()</pre>
+<h4>getBcv__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html#line.135">getBcv__IsNotDefault</a>()</pre>
 </li>
 </ul>
 </li>
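The hunk above shows the Jamon-generated accessor trio for each template argument (setX, getX, getX__IsNotDefault) shifting position in ImplData. A minimal, illustrative Java sketch of how that trio is typically consumed; the "html" fallback and the helper class are assumptions for the sketch, not taken from this commit:

  import org.apache.hadoop.hbase.tmpl.regionserver.RSStatusTmpl;

  final class FormatHelper {
    // getFormat__IsNotDefault() reports whether a caller supplied a value;
    // otherwise fall back to an assumed default ("html").
    static String formatOrDefault(RSStatusTmpl.ImplData data) {
      return data.getFormat__IsNotDefault() ? data.getFormat() : "html";
    }
  }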

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html
index 990dd14..7b40100 100644
--- a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html
@@ -283,40 +283,40 @@ extends org.jamon.AbstractTemplateProxy</pre>
 <!--   -->
 </a>
 <h3>Field Detail</h3>
-<a name="format">
+<a name="filter">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>format</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.151">format</a></pre>
+<h4>filter</h4>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.151">filter</a></pre>
 </li>
 </ul>
-<a name="bcv">
+<a name="bcn">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>bcv</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.158">bcv</a></pre>
+<h4>bcn</h4>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.158">bcn</a></pre>
 </li>
 </ul>
-<a name="bcn">
+<a name="format">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>bcn</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.165">bcn</a></pre>
+<h4>format</h4>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.165">format</a></pre>
 </li>
 </ul>
-<a name="filter">
+<a name="bcv">
 <!--   -->
 </a>
 <ul class="blockListLast">
 <li class="blockList">
-<h4>filter</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.172">filter</a></pre>
+<h4>bcv</h4>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.172">bcv</a></pre>
 </li>
 </ul>
 </li>
@@ -388,40 +388,40 @@ extends org.jamon.AbstractTemplateProxy</pre>
 </dl>
 </li>
 </ul>
-<a name="setFormat(java.lang.String)">
+<a name="setFilter(java.lang.String)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setFormat</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.regionserver">RSStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.152">setFormat</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_format)</pre>
+<h4>setFilter</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.regionserver">RSStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.152">setFilter</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_filter)</pre>
 </li>
 </ul>
-<a name="setBcv(java.lang.String)">
+<a name="setBcn(java.lang.String)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setBcv</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.regionserver">RSStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.159">setBcv</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_bcv)</pre>
+<h4>setBcn</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.regionserver">RSStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.159">setBcn</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_bcn)</pre>
 </li>
 </ul>
-<a name="setBcn(java.lang.String)">
+<a name="setFormat(java.lang.String)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setBcn</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.regionserver">RSStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.166">setBcn</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_bcn)</pre>
+<h4>setFormat</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.regionserver">RSStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.166">setFormat</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_format)</pre>
 </li>
 </ul>
-<a name="setFilter(java.lang.String)">
+<a name="setBcv(java.lang.String)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setFilter</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.regionserver">RSStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.173">setFilter</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_filter)</pre>
+<h4>setBcv</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.regionserver">RSStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html#line.173">setBcv</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_bcv)</pre>
 </li>
 </ul>
 <a name="constructImpl(java.lang.Class)">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
index 2fe9fef..094392d 100644
--- a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html
@@ -224,40 +224,40 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/tmpl/regionserver/
 <pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html#line.28">regionServer</a></pre>
 </li>
 </ul>
-<a name="format">
+<a name="filter">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>format</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html#line.29">format</a></pre>
+<h4>filter</h4>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html#line.29">filter</a></pre>
 </li>
 </ul>
-<a name="bcv">
+<a name="bcn">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>bcv</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html#line.30">bcv</a></pre>
+<h4>bcn</h4>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html#line.30">bcn</a></pre>
 </li>
 </ul>
-<a name="bcn">
+<a name="format">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>bcn</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html#line.31">bcn</a></pre>
+<h4>format</h4>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html#line.31">format</a></pre>
 </li>
 </ul>
-<a name="filter">
+<a name="bcv">
 <!--   -->
 </a>
 <ul class="blockListLast">
 <li class="blockList">
-<h4>filter</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html#line.32">filter</a></pre>
+<h4>bcv</h4>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmplImpl.html#line.32">bcv</a></pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/util/class-use/Counter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/Counter.html b/devapidocs/org/apache/hadoop/hbase/util/class-use/Counter.html
index cc78fa6..c763f78 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/Counter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/Counter.html
@@ -124,11 +124,11 @@
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code>(package private) static <a href="../../../../../../org/apache/hadoop/hbase/util/Counter.html" title="class in org.apache.hadoop.hbase.util">Counter</a></code></td>
-<td class="colLast"><span class="strong">HFile.</span><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html#checksumFailures">checksumFailures</a></strong></code>&nbsp;</td>
+<td class="colLast"><span class="strong">HFile.</span><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html#CHECKSUM_FAILURES">CHECKSUM_FAILURES</a></strong></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/util/Counter.html" title="class in org.apache.hadoop.hbase.util">Counter</a></code></td>
-<td class="colLast"><span class="strong">HFile.</span><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html#dataBlockReadCnt">dataBlockReadCnt</a></strong></code>&nbsp;</td>
+<td class="colLast"><span class="strong">HFile.</span><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html#DATABLOCK_READ_COUNT">DATABLOCK_READ_COUNT</a></strong></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/util/Counter.html" title="class in org.apache.hadoop.hbase.util">Counter</a></code></td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index a9dc5c6..7d826cc 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -471,14 +471,14 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/FanOutOneBlockAsyncDFSOutput.State.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">FanOutOneBlockAsyncDFSOutput.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">PrettyPrinter.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">PoolMap.PoolType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">PrettyPrinter.Unit</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">ChecksumType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="strong">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index 2cda76d..1f810f3 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -148,9 +148,9 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.Strategies.html" title="enum in org.apache.hadoop.hbase.wal"><span class="strong">RegionGroupingProvider.Strategies</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALKey.Version.html" title="enum in org.apache.hadoop.hbase.wal"><span class="strong">WALKey.Version</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="strong">WALFactory.Providers</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.Strategies.html" title="enum in org.apache.hadoop.hbase.wal"><span class="strong">RegionGroupingProvider.Strategies</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/overview-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index a535004..4a55386 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -3569,6 +3569,7 @@
 </li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFile.Reader</span></a> (also extends org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.CachingBlockReader</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.Writer.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFile.Writer</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFileScanner</span></a> (also extends org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/HTableInterface.html" title="interface in org.apache.hadoop.hbase.client"><span class="strong">HTableInterface</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver"><span class="strong">InternalScanner</span></a>
 <ul>
@@ -3650,6 +3651,7 @@
 </li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFile.Reader</span></a> (also extends java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.CachingBlockReader</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.Writer.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFile.Writer</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFileScanner</span></a> (also extends java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/HTableInterface.html" title="interface in org.apache.hadoop.hbase.client"><span class="strong">HTableInterface</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver"><span class="strong">InternalScanner</span></a>
 <ul>
@@ -4051,7 +4053,7 @@
 <li type="circle">org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/ShareableMemory.html" title="interface in org.apache.hadoop.hbase"><span class="strong">ShareableMemory</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver"><span class="strong">Shipper</span></a>
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFileScanner</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile"><span class="strong">HFileScanner</span></a> (also extends java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver"><span class="strong">KeyValueScanner</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/RegionScanner.html" title="interface in org.apache.hadoop.hbase.regionserver"><span class="strong">RegionScanner</span></a> (also extends org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>)</li>
 </ul>
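The tree entries above now list HFileScanner as extending java.io.Closeable (as well as Shipper), so a scanner can be managed with try-with-resources. A hedged sketch of that usage; the getScanner(cacheBlocks, pread) signature and the seekTo()/next() loop follow the scanner's usual pattern and are assumed rather than taken from this page:

  import java.io.IOException;
  import org.apache.hadoop.hbase.io.hfile.HFile;
  import org.apache.hadoop.hbase.io.hfile.HFileScanner;

  final class ScanSketch {
    // Close the scanner automatically now that it is Closeable.
    static void scanAll(HFile.Reader reader) throws IOException {
      try (HFileScanner scanner = reader.getScanner(true, false)) {
        if (scanner.seekTo()) {
          do {
            // consume the current cell here (accessor omitted in this sketch)
          } while (scanner.next());
        }
      }
    }
  }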


[29/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html
index be3bd58..30d7179 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html
@@ -108,7 +108,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.820">SplitLogManager.TerminationStatus</a>
+<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.819">SplitLogManager.TerminationStatus</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a>&gt;</pre>
 </li>
 </ul>
@@ -225,7 +225,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>IN_PROGRESS</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.821">IN_PROGRESS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.820">IN_PROGRESS</a></pre>
 </li>
 </ul>
 <a name="SUCCESS">
@@ -234,7 +234,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>SUCCESS</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.821">SUCCESS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.820">SUCCESS</a></pre>
 </li>
 </ul>
 <a name="FAILURE">
@@ -243,7 +243,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>FAILURE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.821">FAILURE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.820">FAILURE</a></pre>
 </li>
 </ul>
 <a name="DELETED">
@@ -252,7 +252,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>DELETED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.821">DELETED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.820">DELETED</a></pre>
 </li>
 </ul>
 </li>
@@ -269,7 +269,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>statusMsg</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.823">statusMsg</a></pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.822">statusMsg</a></pre>
 </li>
 </ul>
 </li>
@@ -286,7 +286,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.624">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.623">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -303,7 +303,7 @@ for (SplitLogManager.TerminationStatus c : SplitLogManager.TerminationStatus.val
 <ul class="blockList">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.624">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.623">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 
@@ -321,7 +321,7 @@ not permitted.)</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.830">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html#line.829">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master">SplitLogManager.TerminationStatus</a>&gt;</code></dd>
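The values()/valueOf() entries above carry the standard enum contract, and the hunk header truncates the usual iteration example. An illustrative completion in that spirit (not part of the published diff):

  import org.apache.hadoop.hbase.master.SplitLogManager;

  final class TerminationStatusSketch {
    static void printAll() {
      // Iterate the constants in declaration order, then look one up by name.
      for (SplitLogManager.TerminationStatus status
          : SplitLogManager.TerminationStatus.values()) {
        System.out.println(status);  // toString() is overridden above to print the status message
      }
      SplitLogManager.TerminationStatus ok =
          SplitLogManager.TerminationStatus.valueOf("SUCCESS");  // name must match exactly
    }
  }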

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html
index 330816f..7243fdd 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html
@@ -108,7 +108,7 @@
 </dl>
 <hr>
 <br>
-<pre>private class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.690">SplitLogManager.TimeoutMonitor</a>
+<pre>private class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.689">SplitLogManager.TimeoutMonitor</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a></pre>
 <div class="block">Periodically checks all active tasks and resubmits the ones that have timed out</div>
 </li>
@@ -207,7 +207,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>lastLog</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html#line.691">lastLog</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html#line.690">lastLog</a></pre>
 </li>
 </ul>
 </li>
@@ -224,7 +224,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SplitLogManager.TimeoutMonitor</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html#line.693">SplitLogManager.TimeoutMonitor</a>(int&nbsp;period,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html#line.692">SplitLogManager.TimeoutMonitor</a>(int&nbsp;period,
                               <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a>&nbsp;stopper)</pre>
 </li>
 </ul>
@@ -242,7 +242,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>chore</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html#line.698">chore</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html#line.697">chore</a>()</pre>
 <div class="block"><strong>Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html#chore()">ScheduledChore</a></code></strong></div>
 <div class="block">The task to execute on each scheduled execution of the Chore</div>
 <dl>
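TimeoutMonitor above is a ScheduledChore whose chore() body, per its class comment, periodically checks active tasks and resubmits the ones that have timed out. A hedged sketch of that ScheduledChore pattern; the super(name, stopper, period) constructor form and the resubmitExpiredTasks() helper are assumptions for illustration:

  import org.apache.hadoop.hbase.ScheduledChore;
  import org.apache.hadoop.hbase.Stoppable;

  final class ResubmitMonitorSketch extends ScheduledChore {
    ResubmitMonitorSketch(Stoppable stopper, int period) {
      super("ResubmitMonitorSketch", stopper, period);  // constructor form assumed
    }

    @Override
    protected void chore() {
      // Runs once per scheduled period; stands in for the real resubmission logic.
      resubmitExpiredTasks();
    }

    private void resubmitExpiredTasks() {
      // illustration only
    }
  }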

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.html b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.html
index 6b1a1c0..d511f7f 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/SplitLogManager.html
@@ -96,7 +96,7 @@
 <hr>
 <br>
 <pre><a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Private</a>
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.101">SplitLogManager</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.100">SplitLogManager</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Distributes the task of log splitting to the available region servers.
  Coordination happens via coordination engine. For every log file that has to be split a
@@ -428,7 +428,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.102">LOG</a></pre>
+<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.101">LOG</a></pre>
 </li>
 </ul>
 <a name="server">
@@ -437,7 +437,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>server</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.104">server</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.103">server</a></pre>
 </li>
 </ul>
 <a name="stopper">
@@ -446,7 +446,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>stopper</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.106">stopper</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.105">stopper</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -455,7 +455,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.107">conf</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.106">conf</a></pre>
 </li>
 </ul>
 <a name="choreService">
@@ -464,7 +464,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>choreService</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ChoreService.html" title="class in org.apache.hadoop.hbase">ChoreService</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.108">choreService</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ChoreService.html" title="class in org.apache.hadoop.hbase">ChoreService</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.107">choreService</a></pre>
 </li>
 </ul>
 <a name="DEFAULT_UNASSIGNED_TIMEOUT">
@@ -473,7 +473,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_UNASSIGNED_TIMEOUT</h4>
-<pre>public static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.110">DEFAULT_UNASSIGNED_TIMEOUT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.109">DEFAULT_UNASSIGNED_TIMEOUT</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.master.SplitLogManager.DEFAULT_UNASSIGNED_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -483,7 +483,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>unassignedTimeout</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.112">unassignedTimeout</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.111">unassignedTimeout</a></pre>
 </li>
 </ul>
 <a name="lastTaskCreateTime">
@@ -492,7 +492,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>lastTaskCreateTime</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.113">lastTaskCreateTime</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.112">lastTaskCreateTime</a></pre>
 </li>
 </ul>
 <a name="checkRecoveringTimeThreshold">
@@ -501,7 +501,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>checkRecoveringTimeThreshold</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.114">checkRecoveringTimeThreshold</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.113">checkRecoveringTimeThreshold</a></pre>
 </li>
 </ul>
 <a name="failedRecoveringRegionDeletions">
@@ -510,7 +510,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>failedRecoveringRegionDeletions</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&gt;&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.115">failedRecoveringRegionDeletions</a></pre>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&gt;&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.114">failedRecoveringRegionDeletions</a></pre>
 </li>
 </ul>
 <a name="recoveringRegionLock">
@@ -519,7 +519,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>recoveringRegionLock</h4>
-<pre>protected final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/locks/ReentrantLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantLock</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.122">recoveringRegionLock</a></pre>
+<pre>protected final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/locks/ReentrantLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantLock</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.121">recoveringRegionLock</a></pre>
 <div class="block">In distributedLogReplay mode, we need touch both splitlog and recovering-regions znodes in one
  operation. So the lock is used to guard such cases.</div>
 </li>
@@ -530,7 +530,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>tasks</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.Task.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.Task</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.124">tasks</a></pre>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.Task.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.Task</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.123">tasks</a></pre>
 </li>
 </ul>
 <a name="timeoutMonitor">
@@ -539,7 +539,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>timeoutMonitor</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.TimeoutMonitor</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.125">timeoutMonitor</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TimeoutMonitor.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.TimeoutMonitor</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.124">timeoutMonitor</a></pre>
 </li>
 </ul>
 <a name="deadWorkers">
@@ -548,7 +548,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>deadWorkers</h4>
-<pre>private volatile&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.127">deadWorkers</a></pre>
+<pre>private volatile&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.126">deadWorkers</a></pre>
 </li>
 </ul>
 <a name="deadWorkersLock">
@@ -557,7 +557,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>deadWorkersLock</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.128">deadWorkersLock</a></pre>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.127">deadWorkersLock</a></pre>
 </li>
 </ul>
 </li>
@@ -574,7 +574,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SplitLogManager</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.140">SplitLogManager</a>(<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a>&nbsp;server,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.139">SplitLogManager</a>(<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a>&nbsp;server,
                org.apache.hadoop.conf.Configuration&nbsp;conf,
                <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a>&nbsp;stopper,
                <a href="../../../../../org/apache/hadoop/hbase/master/MasterServices.html" title="interface in org.apache.hadoop.hbase.master">MasterServices</a>&nbsp;master,
@@ -601,7 +601,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileList</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileStatus[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.165">getFileList</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;logDirs,
+<pre>private&nbsp;org.apache.hadoop.fs.FileStatus[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.164">getFileList</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;logDirs,
                                             org.apache.hadoop.fs.PathFilter&nbsp;filter)
                                                throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -614,7 +614,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileList</h4>
-<pre>public static&nbsp;org.apache.hadoop.fs.FileStatus[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.181">getFileList</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;org.apache.hadoop.fs.FileStatus[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.180">getFileList</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                             <a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;logDirs,
                                             org.apache.hadoop.fs.PathFilter&nbsp;filter)
                                                      throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -637,7 +637,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>splitLogDistributed</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.208">splitLogDistributed</a>(org.apache.hadoop.fs.Path&nbsp;logDir)
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.207">splitLogDistributed</a>(org.apache.hadoop.fs.Path&nbsp;logDir)
                          throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>logDir</code> - one region sever wal dir path in .logs</dd>
 <dt><span class="strong">Returns:</span></dt><dd>cumulative size of the logfiles split</dd>
@@ -652,7 +652,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>splitLogDistributed</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.222">splitLogDistributed</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;logDirs)
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.221">splitLogDistributed</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;logDirs)
                          throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">The caller will block until all the log files of the given region server have been processed -
  successfully split or an error is encountered - by an available worker region server. This
@@ -669,7 +669,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>splitLogDistributed</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.250">splitLogDistributed</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;serverNames,
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.249">splitLogDistributed</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;serverNames,
                        <a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;logDirs,
                        org.apache.hadoop.fs.PathFilter&nbsp;filter)
                          throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -688,7 +688,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>enqueueSplitTask</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.327">enqueueSplitTask</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;taskname,
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.326">enqueueSplitTask</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;taskname,
                        <a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.TaskBatch</a>&nbsp;batch)</pre>
 <div class="block">Add a task entry to coordination if it is not already there.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>taskname</code> - the path of the log to be split</dd><dd><code>batch</code> - the batch this task belongs to</dd>
@@ -701,7 +701,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForSplittingCompletion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.342">waitForSplittingCompletion</a>(<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.TaskBatch</a>&nbsp;batch,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.341">waitForSplittingCompletion</a>(<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.TaskBatch</a>&nbsp;batch,
                               <a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)</pre>
 </li>
 </ul>
@@ -711,7 +711,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getTasks</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.Task.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.Task</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.383">getTasks</a>()</pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.Task.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.Task</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.382">getTasks</a>()</pre>
 </li>
 </ul>
 <a name="activeTasks(org.apache.hadoop.hbase.master.SplitLogManager.TaskBatch)">
@@ -720,7 +720,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>activeTasks</h4>
-<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.387">activeTasks</a>(<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.TaskBatch</a>&nbsp;batch)</pre>
+<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.386">activeTasks</a>(<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.TaskBatch</a>&nbsp;batch)</pre>
 </li>
 </ul>
 <a name="removeRecoveringRegions(java.util.Set, java.lang.Boolean)">
@@ -729,7 +729,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>removeRecoveringRegions</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.404">removeRecoveringRegions</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;serverNames,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.403">removeRecoveringRegions</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;serverNames,
                            <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&nbsp;isMetaRecovery)</pre>
 <div class="block">It removes recovering regions under /hbase/recovering-regions/[encoded region name] so that the
  region server hosting the region can allow reads to the recovered region</div>
@@ -742,7 +742,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>removeStaleRecoveringRegions</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.438">removeStaleRecoveringRegions</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;failedServers)
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.437">removeStaleRecoveringRegions</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;failedServers)
                             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                    <a href="http://docs.oracle.com/javase/7/docs/api/java/io/InterruptedIOException.html?is-external=true" title="class or interface in java.io">InterruptedIOException</a></pre>
 <div class="block">It removes stale recovering regions under /hbase/recovering-regions/[encoded region name]
@@ -759,7 +759,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>createTaskIfAbsent</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.Task.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.Task</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.461">createTaskIfAbsent</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.Task.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.Task</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.460">createTaskIfAbsent</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
                                       <a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TaskBatch.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.TaskBatch</a>&nbsp;batch)</pre>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>path</code> - </dd><dd><code>batch</code> - </dd>
 <dt><span class="strong">Returns:</span></dt><dd>null on success, existing task on error</dd></dl>
@@ -771,7 +771,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>findOrCreateOrphanTask</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.Task.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.Task</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.520">findOrCreateOrphanTask</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.Task.html" title="class in org.apache.hadoop.hbase.master">SplitLogManager.Task</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.519">findOrCreateOrphanTask</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
 </li>
 </ul>
 <a name="stop()">
@@ -780,7 +780,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.532">stop</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.531">stop</a>()</pre>
 </li>
 </ul>
 <a name="handleDeadWorker(org.apache.hadoop.hbase.ServerName)">
@@ -789,7 +789,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>handleDeadWorker</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.541">handleDeadWorker</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;workerName)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.540">handleDeadWorker</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;workerName)</pre>
 </li>
 </ul>
 <a name="handleDeadWorkers(java.util.Set)">
@@ -798,7 +798,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>handleDeadWorkers</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.553">handleDeadWorkers</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;serverNames)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.552">handleDeadWorkers</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;serverNames)</pre>
 </li>
 </ul>
 <a name="setRecoveryMode(boolean)">
@@ -807,7 +807,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>setRecoveryMode</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.569">setRecoveryMode</a>(boolean&nbsp;isForInitialization)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.568">setRecoveryMode</a>(boolean&nbsp;isForInitialization)
                      throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This function is to set recovery mode from outstanding split log tasks from before or current
  configuration setting</div>
@@ -822,7 +822,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>markRegionsRecovering</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.575">markRegionsRecovering</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;server,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.574">markRegionsRecovering</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;server,
                          <a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a>&gt;&nbsp;userRegions)
                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/InterruptedIOException.html?is-external=true" title="class or interface in java.io">InterruptedIOException</a>,
                                   <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -837,7 +837,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isLogReplaying</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.594">isLogReplaying</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.593">isLogReplaying</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>whether log is replaying</dd></dl>
 </li>
 </ul>
@@ -847,7 +847,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isLogSplitting</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.603">isLogSplitting</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.602">isLogSplitting</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>whether log is splitting</dd></dl>
 </li>
 </ul>
@@ -857,7 +857,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getRecoveryMode</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.612">getRecoveryMode</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/SplitLogManager.html#line.611">getRecoveryMode</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the current log recovery mode</dd></dl>
 </li>
 </ul>
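
The SplitLogManager methods documented in the diff above only shift their source-line anchors, but they describe the master-driven distributed WAL splitting API. A minimal, hypothetical Java sketch of calling the documented splitLogDistributed(Path) entry point follows; the helper class SplitLogExample and the already-constructed SplitLogManager instance are illustrative assumptions (the remaining constructor arguments are elided above), not part of the generated docs.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.master.SplitLogManager;

// Hypothetical helper: splits all WAL files under one region server's log dir.
public final class SplitLogExample {
  static long splitOneServerLogs(SplitLogManager slm, String walDirPath) throws IOException {
    // Per the docs above, splitLogDistributed blocks until every log file under
    // the dir has been split by a worker region server (or an error occurs),
    // and returns the cumulative size of the log files split.
    return slm.splitLogDistributed(new Path(walDirPath));
  }
}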

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index e541514..c2d1553 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -318,11 +318,11 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="strong">SplitLogManager.TerminationStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="strong">SplitLogManager.ResubmitDirective</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="strong">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="strong">RegionState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="strong">MasterRpcServices.BalanceSwitchMode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="strong">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="strong">SplitLogManager.TerminationStatus</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index aaf26d9..8d719f7 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -145,8 +145,8 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.MarkRegionOfflineOpResult.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="strong">DisableTableProcedure.MarkRegionOfflineOpResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="strong">TableProcedureInterface.TableOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.MarkRegionOfflineOpResult.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="strong">DisableTableProcedure.MarkRegionOfflineOpResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="strong">ServerProcedureInterface.ServerOperationType</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/nio/class-use/ByteBuff.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/nio/class-use/ByteBuff.html b/devapidocs/org/apache/hadoop/hbase/nio/class-use/ByteBuff.html
index 45d522c..a92409d 100644
--- a/devapidocs/org/apache/hadoop/hbase/nio/class-use/ByteBuff.html
+++ b/devapidocs/org/apache/hadoop/hbase/nio/class-use/ByteBuff.html
@@ -505,7 +505,7 @@
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a></code></td>
+<td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a></code></td>
 <td class="colLast"><span class="strong">HFileBlock.</span><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#getBufferReadOnly()">getBufferReadOnly</a></strong>()</code>
 <div class="block">Returns the buffer this block stores internally.</div>
 </td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 2d36786..3dcefc2 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -352,8 +352,8 @@
 </ul>
 <h2 title="Annotation Type Hierarchy">Annotation Type Hierarchy</h2>
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/VersionAnnotation.html" title="annotation in org.apache.hadoop.hbase"><span class="strong">VersionAnnotation</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaMutationAnnotation.html" title="annotation in org.apache.hadoop.hbase"><span class="strong">MetaMutationAnnotation</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/VersionAnnotation.html" title="annotation in org.apache.hadoop.hbase"><span class="strong">VersionAnnotation</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
 </ul>
 <h2 title="Enum Hierarchy">Enum Hierarchy</h2>
 <ul>
@@ -361,14 +361,14 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="strong">HealthChecker.HealthCheckerExitStatus</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="strong">KeepDeletedCells</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="strong">MetaTableAccessor.QueryType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="strong">Coprocessor.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="strong">KeyValue.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="strong">HConstants.OperationStatusCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="strong">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="strong">MetaTableAccessor.QueryType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="strong">Coprocessor.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="strong">KeepDeletedCells</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase"><span class="strong">HConstants.Modify</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="strong">HealthChecker.HealthCheckerExitStatus</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index a73d0a7..19ef617 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -120,8 +120,8 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="strong">RootProcedureState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="strong">StateMachineProcedure.Flow</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="strong">RootProcedureState.State</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index bf598a0..b0b01d7 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -172,11 +172,11 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="strong">ThrottlingException.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="strong">QuotaType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="strong">OperationQuota.OperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="strong">QuotaScope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="strong">ThrottleType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="strong">QuotaType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="strong">QuotaScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="strong">ThrottlingException.Type</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
index 26ed048..6ac8954 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
@@ -103,7 +103,7 @@
 </dl>
 <hr>
 <br>
-<pre>private final class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2121">HStore.StoreFlusherImpl</a>
+<pre>private final class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2124">HStore.StoreFlusherImpl</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a></pre>
 </li>
@@ -252,7 +252,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheFlushSeqNum</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2123">cacheFlushSeqNum</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2126">cacheFlushSeqNum</a></pre>
 </li>
 </ul>
 <a name="snapshot">
@@ -261,7 +261,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>snapshot</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSnapshot</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2124">snapshot</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSnapshot</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2127">snapshot</a></pre>
 </li>
 </ul>
 <a name="tempFiles">
@@ -270,7 +270,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>tempFiles</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2125">tempFiles</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2128">tempFiles</a></pre>
 </li>
 </ul>
 <a name="committedFiles">
@@ -279,7 +279,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>committedFiles</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2126">committedFiles</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2129">committedFiles</a></pre>
 </li>
 </ul>
 <a name="cacheFlushCount">
@@ -288,7 +288,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheFlushCount</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2127">cacheFlushCount</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2130">cacheFlushCount</a></pre>
 </li>
 </ul>
 <a name="cacheFlushSize">
@@ -297,7 +297,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>cacheFlushSize</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2128">cacheFlushSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2131">cacheFlushSize</a></pre>
 </li>
 </ul>
 </li>
@@ -314,7 +314,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HStore.StoreFlusherImpl</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2130">HStore.StoreFlusherImpl</a>(long&nbsp;cacheFlushSeqNum)</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2133">HStore.StoreFlusherImpl</a>(long&nbsp;cacheFlushSeqNum)</pre>
 </li>
 </ul>
 </li>
@@ -331,7 +331,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>prepare</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2139">prepare</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2142">prepare</a>()</pre>
 <div class="block">This is not thread safe. The caller should have a lock on the region or the store.
  If necessary, the lock can be added with the patch provided in HBASE-10087</div>
 <dl>
@@ -346,7 +346,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>flushCache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2148">flushCache</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2151">flushCache</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
                 throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#flushCache(org.apache.hadoop.hbase.monitoring.MonitoredTask)">StoreFlushContext</a></code></strong></div>
 <div class="block">Flush the cache (create the new store file)
@@ -366,7 +366,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>commit</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2156">commit</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2159">commit</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
                throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#commit(org.apache.hadoop.hbase.monitoring.MonitoredTask)">StoreFlushContext</a></code></strong></div>
 <div class="block">Commit the flush - add the store file to the store and clear the
@@ -389,7 +389,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>getCommittedFiles</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2195">getCommittedFiles</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2198">getCommittedFiles</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#getCommittedFiles()">StoreFlushContext</a></code></strong></div>
 <div class="block">Returns the newly committed files from the flush. Called only if commit returns true</div>
 <dl>
@@ -404,7 +404,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>replayFlush</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2208">replayFlush</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;fileNames,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2211">replayFlush</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;fileNames,
                boolean&nbsp;dropMemstoreSnapshot)
                  throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Similar to commit, but called in secondary region replicas for replaying the
@@ -424,7 +424,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>abort</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2238">abort</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2241">abort</a>()
            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Abort the snapshot preparation. Drops the snapshot if any.</div>
 <dl>
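
The HStore.StoreFlusherImpl methods above describe a flush lifecycle: prepare() snapshots the memstore, flushCache(status) writes the temp files, commit(status) moves them into the store, and abort() drops the snapshot on failure. A minimal sketch of driving that lifecycle through the StoreFlushContext interface follows; FlushLifecycleSketch and runFlush are hypothetical names, and it assumes the interface exposes the same prepare/flushCache/commit/abort/getCommittedFiles methods shown above.

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.regionserver.StoreFlushContext;

final class FlushLifecycleSketch {
  // Hypothetical driver for one store flush; the caller is assumed to hold the
  // region/store lock around prepare(), as the prepare() docs above require.
  static List<Path> runFlush(StoreFlushContext ctx, MonitoredTask status) throws IOException {
    ctx.prepare();                       // snapshot the memstore (not thread safe)
    try {
      ctx.flushCache(status);            // create the new store file(s)
      if (ctx.commit(status)) {
        return ctx.getCommittedFiles();  // valid only when commit returned true
      }
      return null;
    } catch (IOException e) {
      ctx.abort();                       // drop the snapshot, if any
      throw e;
    }
  }
}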


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index cc11312..8de0c8a 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -3672,7 +3672,7 @@ service.</div>
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>void</code></td>
+<td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><span class="strong">HFileBlock.Writer.</span><code><strong><a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#write(org.apache.hadoop.hbase.Cell)">write</a></strong>(<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)</code>
 <div class="block">Writes the Cell to this block</div>
 </td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
index 42917b6..7c39993 100644
--- a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
+++ b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
@@ -2896,7 +2896,7 @@ service.</div>
 <tr class="rowColor">
 <td class="colFirst"><code>class&nbsp;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></strong></code>
-<div class="block">Reading <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and 2 blocks, and writing version 2 blocks.</div>
+<div class="block">Reads <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and version 2 blocks but writes version 2 blocks only.</div>
 </td>
 </tr>
 <tr class="altColor">
@@ -3034,7 +3034,7 @@ service.</div>
 <td class="colFirst"><code>class&nbsp;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></strong></code>
 <div class="block">BucketCache uses <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketAllocator</code></a> to allocate/free blocks, and uses
- <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ramCache"><code>BucketCache.ramCache</code></a> and <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#backingMap"><code>BucketCache.backingMap</code></a> in order to
+ BucketCache#ramCache and BucketCache#backingMap in order to
  determine if a given element is in the cache.</div>
 </td>
 </tr>

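[Editorial aside on the BucketCache javadoc touched above: BucketCache is enabled through configuration, normally in the region server's hbase-site.xml. A hedged sketch of the two main properties, set programmatically only for illustration; the class name and the 4096 MB size are arbitrary, hbase.bucketcache.ioengine and hbase.bucketcache.size are real property names but their exact value semantics vary by release.]

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class BucketCacheConfigSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Off-heap BucketCache backed by BucketAllocator; in practice these values
        // belong in the region server's hbase-site.xml, not in application code.
        conf.set("hbase.bucketcache.ioengine", "offheap");
        conf.set("hbase.bucketcache.size", "4096"); // size of the bucket cache, in MB here
        System.out.println("ioengine = " + conf.get("hbase.bucketcache.ioengine"));
      }
    }
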
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Unstable.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Unstable.html b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Unstable.html
index 20f2daa..1a288a4 100644
--- a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Unstable.html
+++ b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Unstable.html
@@ -136,19 +136,19 @@
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/http/package-summary.html">org.apache.hadoop.hbase.http</a></td>
+<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/http/lib/package-summary.html">org.apache.hadoop.hbase.http.lib</a></td>
 <td class="colLast">
 <div class="block">
- Copied from hadoop source code.<br>
- See https://issues.apache.org/jira/browse/HADOOP-10232 to know why.</div>
+ This package provides user-selectable (via configuration) classes that add
+ functionality to the web UI.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/http/lib/package-summary.html">org.apache.hadoop.hbase.http.lib</a></td>
+<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/http/package-summary.html">org.apache.hadoop.hbase.http</a></td>
 <td class="colLast">
 <div class="block">
- This package provides user-selectable (via configuration) classes that add
- functionality to the web UI.</div>
+ Copied from hadoop source code.<br>
+ See https://issues.apache.org/jira/browse/HADOOP-10232 to know why.</div>
 </td>
 </tr>
 </tbody>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
index 3fbe048..40142e5 100644
--- a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
@@ -80,12 +80,12 @@
 </ul>
 <h2 title="Annotation Type Hierarchy">Annotation Type Hierarchy</h2>
 <ul>
+<li type="circle">org.apache.hadoop.hbase.classification.<a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Public.html" title="annotation in org.apache.hadoop.hbase.classification"><span class="strong">InterfaceAudience.Public</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.classification.<a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceStability.Unstable.html" title="annotation in org.apache.hadoop.hbase.classification"><span class="strong">InterfaceStability.Unstable</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
-<li type="circle">org.apache.hadoop.hbase.classification.<a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification"><span class="strong">InterfaceAudience.Private</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
+<li type="circle">org.apache.hadoop.hbase.classification.<a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceStability.Stable.html" title="annotation in org.apache.hadoop.hbase.classification"><span class="strong">InterfaceStability.Stable</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.classification.<a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.LimitedPrivate.html" title="annotation in org.apache.hadoop.hbase.classification"><span class="strong">InterfaceAudience.LimitedPrivate</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.classification.<a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceStability.Evolving.html" title="annotation in org.apache.hadoop.hbase.classification"><span class="strong">InterfaceStability.Evolving</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
-<li type="circle">org.apache.hadoop.hbase.classification.<a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Public.html" title="annotation in org.apache.hadoop.hbase.classification"><span class="strong">InterfaceAudience.Public</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
-<li type="circle">org.apache.hadoop.hbase.classification.<a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceStability.Stable.html" title="annotation in org.apache.hadoop.hbase.classification"><span class="strong">InterfaceStability.Stable</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
+<li type="circle">org.apache.hadoop.hbase.classification.<a href="../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification"><span class="strong">InterfaceAudience.Private</span></a> (implements java.lang.annotation.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>)</li>
 </ul>
 </div>
 <!-- ======= START OF BOTTOM NAVBAR ====== -->

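[Editorial aside: for readers unfamiliar with the annotation types reordered above, this is how they appear on an HBase class; the class InternalHelperExample is invented for illustration.]

    import org.apache.hadoop.hbase.classification.InterfaceAudience;
    import org.apache.hadoop.hbase.classification.InterfaceStability;

    // The annotations in the tree above mark the audience and stability of HBase APIs.
    // A private, still-evolving internal helper would be declared like this:
    @InterfaceAudience.Private
    @InterfaceStability.Evolving
    public class InternalHelperExample {
      // Internal-only API: no compatibility guarantees between releases.
    }
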
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index 33c0206..267c78e 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -389,12 +389,12 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="strong">IsolationLevel</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="strong">Consistency</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="strong">IsolationLevel</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="strong">TableState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Admin.MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="strong">Admin.MasterSwitchType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcess.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="strong">AsyncProcess.Retry</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Admin.CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="strong">Admin.CompactType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="strong">TableState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="strong">Durability</span></a></li>
 </ul>
 </li>

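[Editorial aside: the client enums reordered above (Consistency, IsolationLevel, Durability, ...) are the ones applied to client operations. A small sketch of a timeline-consistent read, assuming a table named "t1" exists and the default configuration is reachable; the class name is made up.]

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Consistency;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.IsolationLevel;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TimelineReadExample {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection();
             Table table = conn.getTable(TableName.valueOf("t1"))) {
          Get get = new Get(Bytes.toBytes("row1"));
          get.setConsistency(Consistency.TIMELINE);             // allow reads from region replicas
          get.setIsolationLevel(IsolationLevel.READ_COMMITTED); // the default, shown explicitly
          Result result = table.get(get);
          System.out.println("stale = " + result.isStale());    // true if served by a replica
        }
      }
    }
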
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html
index ceaf9eb..2c1f3ff 100644
--- a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html
@@ -108,7 +108,7 @@
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.377">PrefixTreeSeeker.OffheapPrefixTreeCell</a>
+<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.376">PrefixTreeSeeker.OffheapPrefixTreeCell</a>
 extends <a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>, <a href="../../../../../../org/apache/hadoop/hbase/SettableSequenceId.html" title="interface in org.apache.hadoop.hbase">SettableSequenceId</a>, <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></pre>
 </li>
@@ -403,7 +403,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>FIXED_OVERHEAD</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.379">FIXED_OVERHEAD</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.378">FIXED_OVERHEAD</a></pre>
 </li>
 </ul>
 <a name="rowBuff">
@@ -412,7 +412,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>rowBuff</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.382">rowBuff</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.381">rowBuff</a></pre>
 </li>
 </ul>
 <a name="rowLength">
@@ -421,7 +421,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>rowLength</h4>
-<pre>private&nbsp;short <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.383">rowLength</a></pre>
+<pre>private&nbsp;short <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.382">rowLength</a></pre>
 </li>
 </ul>
 <a name="famBuff">
@@ -430,7 +430,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>famBuff</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.384">famBuff</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.383">famBuff</a></pre>
 </li>
 </ul>
 <a name="famLength">
@@ -439,7 +439,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>famLength</h4>
-<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.385">famLength</a></pre>
+<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.384">famLength</a></pre>
 </li>
 </ul>
 <a name="qualBuff">
@@ -448,7 +448,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>qualBuff</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.386">qualBuff</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.385">qualBuff</a></pre>
 </li>
 </ul>
 <a name="qualLength">
@@ -457,7 +457,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>qualLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.387">qualLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.386">qualLength</a></pre>
 </li>
 </ul>
 <a name="val">
@@ -466,7 +466,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>val</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.388">val</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.387">val</a></pre>
 </li>
 </ul>
 <a name="valOffset">
@@ -475,7 +475,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>valOffset</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.389">valOffset</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.388">valOffset</a></pre>
 </li>
 </ul>
 <a name="valLength">
@@ -484,7 +484,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>valLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.390">valLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.389">valLength</a></pre>
 </li>
 </ul>
 <a name="tagBuff">
@@ -493,7 +493,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>tagBuff</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.391">tagBuff</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.390">tagBuff</a></pre>
 </li>
 </ul>
 <a name="tagsLength">
@@ -502,7 +502,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.392">tagsLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.391">tagsLength</a></pre>
 </li>
 </ul>
 <a name="ts">
@@ -511,7 +511,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>ts</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.393">ts</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.392">ts</a></pre>
 </li>
 </ul>
 <a name="seqId">
@@ -520,7 +520,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>seqId</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.394">seqId</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.393">seqId</a></pre>
 </li>
 </ul>
 <a name="type">
@@ -529,7 +529,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockListLast">
 <li class="blockList">
 <h4>type</h4>
-<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.395">type</a></pre>
+<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.394">type</a></pre>
 </li>
 </ul>
 </li>
@@ -546,7 +546,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockListLast">
 <li class="blockList">
 <h4>PrefixTreeSeeker.OffheapPrefixTreeCell</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.396">PrefixTreeSeeker.OffheapPrefixTreeCell</a>(byte[]&nbsp;row,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.395">PrefixTreeSeeker.OffheapPrefixTreeCell</a>(byte[]&nbsp;row,
                                       int&nbsp;rowOffset,
                                       short&nbsp;rowLength,
                                       byte[]&nbsp;fam,
@@ -580,7 +580,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>setSequenceId</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.425">setSequenceId</a>(long&nbsp;seqId)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.424">setSequenceId</a>(long&nbsp;seqId)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/SettableSequenceId.html#setSequenceId(long)">SettableSequenceId</a></code></strong></div>
 <div class="block">Sets with the given seqId.</div>
 <dl>
@@ -595,7 +595,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.430">getRowArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.429">getRowArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.</div>
@@ -611,7 +611,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.435">getRowOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.434">getRowOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowOffset()">getRowOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -624,7 +624,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowLength</h4>
-<pre>public&nbsp;short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.440">getRowLength</a>()</pre>
+<pre>public&nbsp;short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.439">getRowLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowLength()">getRowLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -637,7 +637,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.445">getFamilyArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.444">getFamilyArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous bytes composed of legal HDFS filename characters which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.</div>
@@ -653,7 +653,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.450">getFamilyOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.449">getFamilyOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyOffset()">getFamilyOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -666,7 +666,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyLength</h4>
-<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.455">getFamilyLength</a>()</pre>
+<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.454">getFamilyLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyLength()">getFamilyLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -679,7 +679,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.460">getQualifierArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.459">getQualifierArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.</div>
@@ -695,7 +695,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.465">getQualifierOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.464">getQualifierOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierOffset()">getQualifierOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -708,7 +708,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.470">getQualifierLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.469">getQualifierLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierLength()">getQualifierLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -721,7 +721,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTimestamp</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.475">getTimestamp</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.474">getTimestamp</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTimestamp()">getTimestamp</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -735,7 +735,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTypeByte</h4>
-<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.480">getTypeByte</a>()</pre>
+<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.479">getTypeByte</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTypeByte()">getTypeByte</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -748,7 +748,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getSequenceId</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.485">getSequenceId</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.484">getSequenceId</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getSequenceId()">Cell</a></code></strong></div>
 <div class="block">A region-specific unique monotonically increasing sequence ID given to each Cell. It always
  exists for cells in the memstore but is not retained forever. It will be kept for
@@ -766,7 +766,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.490">getValueArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.489">getValueArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
  Integer.MAX_VALUE which is 2,147,483,648 bytes.</div>
@@ -782,7 +782,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.497">getValueOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.496">getValueOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueOffset()">getValueOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -795,7 +795,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.502">getValueLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.501">getValueLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueLength()">getValueLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -808,7 +808,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.507">getTagsArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.506">getTagsArray</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsArray()">getTagsArray</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -821,7 +821,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.512">getTagsOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.511">getTagsOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsOffset()">getTagsOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -834,7 +834,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.517">getTagsLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.516">getTagsLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsLength()">getTagsLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -847,7 +847,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowByteBuffer</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.522">getRowByteBuffer</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.521">getRowByteBuffer</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getRowByteBuffer()">getRowByteBuffer</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -860,7 +860,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowPosition</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.527">getRowPosition</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.526">getRowPosition</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getRowPosition()">getRowPosition</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -873,7 +873,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyByteBuffer</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.532">getFamilyByteBuffer</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.531">getFamilyByteBuffer</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getFamilyByteBuffer()">getFamilyByteBuffer</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -886,7 +886,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyPosition</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.537">getFamilyPosition</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.536">getFamilyPosition</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getFamilyPosition()">getFamilyPosition</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -899,7 +899,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierByteBuffer</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.542">getQualifierByteBuffer</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.541">getQualifierByteBuffer</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getQualifierByteBuffer()">getQualifierByteBuffer</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -912,7 +912,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierPosition</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.547">getQualifierPosition</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.546">getQualifierPosition</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getQualifierPosition()">getQualifierPosition</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -925,7 +925,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsByteBuffer</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.552">getTagsByteBuffer</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.551">getTagsByteBuffer</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getTagsByteBuffer()">getTagsByteBuffer</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -938,7 +938,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsPosition</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.557">getTagsPosition</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.556">getTagsPosition</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getTagsPosition()">getTagsPosition</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -951,7 +951,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueByteBuffer</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.562">getValueByteBuffer</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.561">getValueByteBuffer</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getValueByteBuffer()">getValueByteBuffer</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -964,7 +964,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValuePosition</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.567">getValuePosition</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.566">getValuePosition</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html#getValuePosition()">getValuePosition</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/ByteBufferedCell.html" title="class in org.apache.hadoop.hbase">ByteBufferedCell</a></code></dd>
@@ -977,7 +977,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.572">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.571">heapSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize()">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
@@ -991,7 +991,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.577">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OffheapPrefixTreeCell.html#line.576">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>

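[Editorial aside on the accessors listed in this hunk: the value of a Cell occupies getValueLength() bytes of getValueArray() starting at getValueOffset(); ByteBufferedCell additionally exposes getValueByteBuffer()/getValuePosition() so off-heap implementations such as OffheapPrefixTreeCell can avoid materializing an array. A minimal sketch of copying a value out through the array accessors; the helper class name is made up.]

    import java.util.Arrays;
    import org.apache.hadoop.hbase.Cell;

    public final class CellValueCopy {
      // Copy a cell's value using the accessors documented above: the value lives in
      // getValueArray() starting at getValueOffset() for getValueLength() bytes.
      static byte[] copyValue(Cell cell) {
        return Arrays.copyOfRange(cell.getValueArray(), cell.getValueOffset(),
            cell.getValueOffset() + cell.getValueLength());
      }
    }
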
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html
index c7117ff..1ef9cd4 100644
--- a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html
@@ -103,7 +103,7 @@
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.222">PrefixTreeSeeker.OnheapPrefixTreeCell</a>
+<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.221">PrefixTreeSeeker.OnheapPrefixTreeCell</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>, <a href="../../../../../../org/apache/hadoop/hbase/SettableSequenceId.html" title="interface in org.apache.hadoop.hbase">SettableSequenceId</a>, <a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></pre>
 <div class="block">Cloned version of the PrefixTreeCell where except the value part, the rest
@@ -360,7 +360,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>FIXED_OVERHEAD</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.223">FIXED_OVERHEAD</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.222">FIXED_OVERHEAD</a></pre>
 </li>
 </ul>
 <a name="row">
@@ -369,7 +369,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>row</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.226">row</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.225">row</a></pre>
 </li>
 </ul>
 <a name="rowLength">
@@ -378,7 +378,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>rowLength</h4>
-<pre>private&nbsp;short <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.227">rowLength</a></pre>
+<pre>private&nbsp;short <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.226">rowLength</a></pre>
 </li>
 </ul>
 <a name="fam">
@@ -387,7 +387,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>fam</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.228">fam</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.227">fam</a></pre>
 </li>
 </ul>
 <a name="famLength">
@@ -396,7 +396,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>famLength</h4>
-<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.229">famLength</a></pre>
+<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.228">famLength</a></pre>
 </li>
 </ul>
 <a name="qual">
@@ -405,7 +405,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>qual</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.230">qual</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.229">qual</a></pre>
 </li>
 </ul>
 <a name="qualLength">
@@ -414,7 +414,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>qualLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.231">qualLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.230">qualLength</a></pre>
 </li>
 </ul>
 <a name="val">
@@ -423,7 +423,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>val</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.232">val</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.231">val</a></pre>
 </li>
 </ul>
 <a name="valOffset">
@@ -432,7 +432,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>valOffset</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.233">valOffset</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.232">valOffset</a></pre>
 </li>
 </ul>
 <a name="valLength">
@@ -441,7 +441,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>valLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.234">valLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.233">valLength</a></pre>
 </li>
 </ul>
 <a name="tag">
@@ -450,7 +450,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>tag</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.235">tag</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.234">tag</a></pre>
 </li>
 </ul>
 <a name="tagsLength">
@@ -459,7 +459,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>tagsLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.236">tagsLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.235">tagsLength</a></pre>
 </li>
 </ul>
 <a name="ts">
@@ -468,7 +468,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>ts</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.237">ts</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.236">ts</a></pre>
 </li>
 </ul>
 <a name="seqId">
@@ -477,7 +477,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>seqId</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.238">seqId</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.237">seqId</a></pre>
 </li>
 </ul>
 <a name="type">
@@ -486,7 +486,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockListLast">
 <li class="blockList">
 <h4>type</h4>
-<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.239">type</a></pre>
+<pre>private&nbsp;byte <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.238">type</a></pre>
 </li>
 </ul>
 </li>
@@ -503,7 +503,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockListLast">
 <li class="blockList">
 <h4>PrefixTreeSeeker.OnheapPrefixTreeCell</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.241">PrefixTreeSeeker.OnheapPrefixTreeCell</a>(byte[]&nbsp;row,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.240">PrefixTreeSeeker.OnheapPrefixTreeCell</a>(byte[]&nbsp;row,
                                      int&nbsp;rowOffset,
                                      short&nbsp;rowLength,
                                      byte[]&nbsp;fam,
@@ -537,7 +537,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>setSequenceId</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.266">setSequenceId</a>(long&nbsp;seqId)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.265">setSequenceId</a>(long&nbsp;seqId)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/SettableSequenceId.html#setSequenceId(long)">SettableSequenceId</a></code></strong></div>
 <div class="block">Sets with the given seqId.</div>
 <dl>
@@ -552,7 +552,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.271">getRowArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.270">getRowArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.</div>
@@ -568,7 +568,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.276">getRowOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.275">getRowOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowOffset()">getRowOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -581,7 +581,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getRowLength</h4>
-<pre>public&nbsp;short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.281">getRowLength</a>()</pre>
+<pre>public&nbsp;short&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.280">getRowLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getRowLength()">getRowLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -594,7 +594,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.286">getFamilyArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.285">getFamilyArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous bytes composed of legal HDFS filename characters which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.</div>
@@ -610,7 +610,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.291">getFamilyOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.290">getFamilyOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyOffset()">getFamilyOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -623,7 +623,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyLength</h4>
-<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.296">getFamilyLength</a>()</pre>
+<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.295">getFamilyLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getFamilyLength()">getFamilyLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -636,7 +636,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.301">getQualifierArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.300">getQualifierArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.</div>
@@ -652,7 +652,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.306">getQualifierOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.305">getQualifierOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierOffset()">getQualifierOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -665,7 +665,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getQualifierLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.311">getQualifierLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.310">getQualifierLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getQualifierLength()">getQualifierLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -678,7 +678,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTimestamp</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.316">getTimestamp</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.315">getTimestamp</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTimestamp()">getTimestamp</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -692,7 +692,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTypeByte</h4>
-<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.321">getTypeByte</a>()</pre>
+<pre>public&nbsp;byte&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.320">getTypeByte</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTypeByte()">getTypeByte</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -705,7 +705,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getSequenceId</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.326">getSequenceId</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.325">getSequenceId</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getSequenceId()">Cell</a></code></strong></div>
 <div class="block">A region-specific unique monotonically increasing sequence ID given to each Cell. It always
  exists for cells in the memstore but is not retained forever. It will be kept for
@@ -723,7 +723,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.331">getValueArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.330">getValueArray</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueArray()">Cell</a></code></strong></div>
 <div class="block">Contiguous raw bytes that may start at any index in the containing array. Max length is
 Integer.MAX_VALUE which is 2,147,483,647 bytes.</div>
@@ -739,7 +739,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.336">getValueOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.335">getValueOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueOffset()">getValueOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -752,7 +752,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.341">getValueLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.340">getValueLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getValueLength()">getValueLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -765,7 +765,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsArray</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.346">getTagsArray</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.345">getTagsArray</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsArray()">getTagsArray</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -778,7 +778,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsOffset</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.351">getTagsOffset</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.350">getTagsOffset</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsOffset()">getTagsOffset</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -791,7 +791,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>getTagsLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.356">getTagsLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.355">getTagsLength</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html#getTagsLength()">getTagsLength</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></dd>
@@ -804,7 +804,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.361">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.360">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -817,7 +817,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="
 <ul class="blockListLast">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.372">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html#line.371">heapSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize()">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
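
For reference, a minimal sketch of the HFile block header-size arithmetic that the HConstants hunk further below describes (the hunk itself only shifts the constants down two lines for a relocated TODO comment). It assumes MAGIC_LENGTH is the 8-byte block magic and mirrors the usual org.apache.hadoop.hbase.util.Bytes.SIZEOF_* values; the class name and the field descriptions in the comments are illustrative, since the Javadoc only spells out the checksum-related fields.

// Hypothetical, self-contained sketch; not part of the published diff.
public class HFileBlockHeaderSizeSketch {
  static final int MAGIC_LENGTH = 8; // assumed: 8-byte block magic, e.g. "DATABLK*"
  static final int SIZEOF_BYTE = 1;
  static final int SIZEOF_INT = 4;
  static final int SIZEOF_LONG = 8;

  // Minor version 0: magic plus two int size fields and a long offset field.
  static final int HEADER_SIZE_NO_CHECKSUM =
      MAGIC_LENGTH + 2 * SIZEOF_INT + SIZEOF_LONG;            // 8 + 8 + 8 = 24

  // Minor version 1 adds a 1-byte checksum type, a 4-byte bytesPerChecksum,
  // and a 4-byte value for the size of data on disk.
  static final int HEADER_SIZE =
      HEADER_SIZE_NO_CHECKSUM + SIZEOF_BYTE + 2 * SIZEOF_INT; // 24 + 1 + 8 = 33

  public static void main(String[] args) {
    System.out.println("header, no checksum: " + HEADER_SIZE_NO_CHECKSUM); // 24
    System.out.println("header, with checksum: " + HEADER_SIZE);           // 33
  }
}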


[48/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index de0d003..ea9c5c4 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -73,1205 +73,1207 @@
 <span class="sourceLineNo">065</span>  public static final byte[] RPC_HEADER = new byte[] { 'H', 'B', 'a', 's' };<a name="line.65"></a>
 <span class="sourceLineNo">066</span>  public static final byte RPC_CURRENT_VERSION = 0;<a name="line.66"></a>
 <span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  // HFileBlock constants.<a name="line.68"></a>
-<span class="sourceLineNo">069</span><a name="line.69"></a>
-<span class="sourceLineNo">070</span>  /** The size data structures with minor version is 0 */<a name="line.70"></a>
-<span class="sourceLineNo">071</span>  public static final int HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM = MAGIC_LENGTH + 2 * Bytes.SIZEOF_INT<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      + Bytes.SIZEOF_LONG;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  /** The size of a version 2 HFile block header, minor version 1.<a name="line.73"></a>
-<span class="sourceLineNo">074</span>   * There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum<a name="line.74"></a>
-<span class="sourceLineNo">075</span>   * followed by another 4 byte value to store sizeofDataOnDisk.<a name="line.75"></a>
-<span class="sourceLineNo">076</span>   */<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  public static final int HFILEBLOCK_HEADER_SIZE = HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM +<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    Bytes.SIZEOF_BYTE + 2 * Bytes.SIZEOF_INT;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  /** Just an array of bytes of the right size. */<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  public static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HFILEBLOCK_HEADER_SIZE];<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  //End HFileBlockConstants.<a name="line.82"></a>
+<span class="sourceLineNo">068</span>  // HFileBlock constants. TODO!!!! THESE DEFINES BELONG IN HFILEBLOCK, NOT UP HERE.<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  // Needed down in hbase-common though by encoders but these encoders should not be dealing<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  // in the internals of hfileblocks. Fix encapsulation.<a name="line.70"></a>
+<span class="sourceLineNo">071</span><a name="line.71"></a>
+<span class="sourceLineNo">072</span>  /** The size data structures with minor version is 0 */<a name="line.72"></a>
+<span class="sourceLineNo">073</span>  public static final int HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM = MAGIC_LENGTH + 2 * Bytes.SIZEOF_INT<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      + Bytes.SIZEOF_LONG;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  /** The size of a version 2 HFile block header, minor version 1.<a name="line.75"></a>
+<span class="sourceLineNo">076</span>   * There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum<a name="line.76"></a>
+<span class="sourceLineNo">077</span>   * followed by another 4 byte value to store sizeofDataOnDisk.<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   */<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  public static final int HFILEBLOCK_HEADER_SIZE = HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM +<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    Bytes.SIZEOF_BYTE + 2 * Bytes.SIZEOF_INT;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  /** Just an array of bytes of the right size. */<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HFILEBLOCK_HEADER_SIZE];<a name="line.82"></a>
 <span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   * Status codes used for return values of bulk operations.<a name="line.85"></a>
-<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  @InterfaceAudience.Private<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  public enum OperationStatusCode {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    NOT_RUN,<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    SUCCESS,<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    BAD_FAMILY,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    SANITY_CHECK_FAILURE,<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    FAILURE;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  /** long constant for zero */<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  public static final Long ZERO_L = Long.valueOf(0L);<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static final String NINES = "99999999999999";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final String ZEROES = "00000000000000";<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  // For migration<a name="line.101"></a>
+<span class="sourceLineNo">084</span>  //End HFileBlockConstants.<a name="line.84"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>  /**<a name="line.86"></a>
+<span class="sourceLineNo">087</span>   * Status codes used for return values of bulk operations.<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   */<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  @InterfaceAudience.Private<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  public enum OperationStatusCode {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    NOT_RUN,<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    SUCCESS,<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    BAD_FAMILY,<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    SANITY_CHECK_FAILURE,<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    FAILURE;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  }<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  /** long constant for zero */<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  public static final Long ZERO_L = Long.valueOf(0L);<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  public static final String NINES = "99999999999999";<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  public static final String ZEROES = "00000000000000";<a name="line.101"></a>
 <span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  /** name of version file */<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  public static final String VERSION_FILE_NAME = "hbase.version";<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  /**<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * Current version of file system.<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * Version 4 supports only one kind of bloom filter.<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   * Version 5 changes versions in catalog table regions.<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * Version 6 enables blockcaching on catalog tables.<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * Version 7 introduces hfile -- hbase 0.19 to 0.20..<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   * Version 8 introduces namespace<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   */<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  // public static final String FILE_SYSTEM_VERSION = "6";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  public static final String FILE_SYSTEM_VERSION = "8";<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  // Configuration parameters<a name="line.117"></a>
+<span class="sourceLineNo">103</span>  // For migration<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>  /** name of version file */<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  public static final String VERSION_FILE_NAME = "hbase.version";<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>  /**<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * Current version of file system.<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * Version 4 supports only one kind of bloom filter.<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   * Version 5 changes versions in catalog table regions.<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Version 6 enables blockcaching on catalog tables.<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * Version 7 introduces hfile -- hbase 0.19 to 0.20..<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * Version 8 introduces namespace<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   */<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  // public static final String FILE_SYSTEM_VERSION = "6";<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  public static final String FILE_SYSTEM_VERSION = "8";<a name="line.117"></a>
 <span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  //TODO: Is having HBase homed on port 60k OK?<a name="line.119"></a>
+<span class="sourceLineNo">119</span>  // Configuration parameters<a name="line.119"></a>
 <span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  /** Cluster is in distributed mode or not */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  public static final String CLUSTER_DISTRIBUTED = "hbase.cluster.distributed";<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  /** Config for pluggable load balancers */<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  public static final String HBASE_MASTER_LOADBALANCER_CLASS = "hbase.master.loadbalancer.class";<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  /** Config for balancing the cluster by table */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  public static final String HBASE_MASTER_LOADBALANCE_BYTABLE = "hbase.master.loadbalance.bytable";<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /** The name of the ensemble table */<a name="line.130"></a>
-<span class="sourceLineNo">131</span>  public static final String ENSEMBLE_TABLE_NAME = "hbase:ensemble";<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>  /** Config for pluggable region normalizer */<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  public static final String HBASE_MASTER_NORMALIZER_CLASS =<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    "hbase.master.normalizer.class";<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  /** Cluster is standalone or pseudo-distributed */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static final boolean CLUSTER_IS_LOCAL = false;<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>  /** Cluster is fully-distributed */<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  public static final boolean CLUSTER_IS_DISTRIBUTED = true;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>  /** Default value for cluster distributed mode */<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  public static final boolean DEFAULT_CLUSTER_DISTRIBUTED = CLUSTER_IS_LOCAL;<a name="line.144"></a>
-<span class="sourceLineNo">145</span><a name="line.145"></a>
-<span class="sourceLineNo">146</span>  /** default host address */<a name="line.146"></a>
-<span class="sourceLineNo">147</span>  public static final String DEFAULT_HOST = "0.0.0.0";<a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  /** Parameter name for port master listens on. */<a name="line.149"></a>
-<span class="sourceLineNo">150</span>  public static final String MASTER_PORT = "hbase.master.port";<a name="line.150"></a>
-<span class="sourceLineNo">151</span><a name="line.151"></a>
-<span class="sourceLineNo">152</span>  /** default port that the master listens on */<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  public static final int DEFAULT_MASTER_PORT = 16000;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  /** default port for master web api */<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  public static final int DEFAULT_MASTER_INFOPORT = 16010;<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>  /** Configuration key for master web API port */<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  public static final String MASTER_INFO_PORT = "hbase.master.info.port";<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>  /** Parameter name for the master type being backup (waits for primary to go inactive). */<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>  /**<a name="line.164"></a>
-<span class="sourceLineNo">165</span>   * by default every master is a possible primary master unless the conf explicitly overrides it<a name="line.165"></a>
-<span class="sourceLineNo">166</span>   */<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  /** Name of ZooKeeper quorum configuration parameter. */<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  public static final String ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  /** Common prefix of ZooKeeper configuration properties */<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  public static final String ZK_CFG_PROPERTY_PREFIX =<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      "hbase.zookeeper.property.";<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  public static final int ZK_CFG_PROPERTY_PREFIX_LEN =<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      ZK_CFG_PROPERTY_PREFIX.length();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  /**<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   * The ZK client port key in the ZK properties map. The name reflects the<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * fact that this is not an HBase configuration key.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   */<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  public static final String CLIENT_PORT_STR = "clientPort";<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /** Parameter name for the client port that the zookeeper listens on */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  public static final String ZOOKEEPER_CLIENT_PORT =<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      ZK_CFG_PROPERTY_PREFIX + CLIENT_PORT_STR;<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  /** Default client port that the zookeeper listens on */<a name="line.189"></a>
-<span class="sourceLineNo">190</span>  public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;<a name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>  /**<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * Parameter name for the wait time for the recoverable zookeeper<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      "hbase.zookeeper.recoverable.waittime";<a name="line.196"></a>
-<span class="sourceLineNo">197</span><a name="line.197"></a>
-<span class="sourceLineNo">198</span>  /** Default wait time for the recoverable zookeeper */<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  /** Parameter name for the root dir in ZK for this cluster */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  public static final String ZOOKEEPER_ZNODE_PARENT = "zookeeper.znode.parent";<a name="line.202"></a>
-<span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>  public static final String DEFAULT_ZOOKEEPER_ZNODE_PARENT = "/hbase";<a name="line.204"></a>
+<span class="sourceLineNo">121</span>  //TODO: Is having HBase homed on port 60k OK?<a name="line.121"></a>
+<span class="sourceLineNo">122</span><a name="line.122"></a>
+<span class="sourceLineNo">123</span>  /** Cluster is in distributed mode or not */<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  public static final String CLUSTER_DISTRIBUTED = "hbase.cluster.distributed";<a name="line.124"></a>
+<span class="sourceLineNo">125</span><a name="line.125"></a>
+<span class="sourceLineNo">126</span>  /** Config for pluggable load balancers */<a name="line.126"></a>
+<span class="sourceLineNo">127</span>  public static final String HBASE_MASTER_LOADBALANCER_CLASS = "hbase.master.loadbalancer.class";<a name="line.127"></a>
+<span class="sourceLineNo">128</span><a name="line.128"></a>
+<span class="sourceLineNo">129</span>  /** Config for balancing the cluster by table */<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  public static final String HBASE_MASTER_LOADBALANCE_BYTABLE = "hbase.master.loadbalance.bytable";<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /** The name of the ensemble table */<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  public static final String ENSEMBLE_TABLE_NAME = "hbase:ensemble";<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  /** Config for pluggable region normalizer */<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  public static final String HBASE_MASTER_NORMALIZER_CLASS =<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    "hbase.master.normalizer.class";<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  /** Cluster is standalone or pseudo-distributed */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static final boolean CLUSTER_IS_LOCAL = false;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>  /** Cluster is fully-distributed */<a name="line.142"></a>
+<span class="sourceLineNo">143</span>  public static final boolean CLUSTER_IS_DISTRIBUTED = true;<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>  /** Default value for cluster distributed mode */<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  public static final boolean DEFAULT_CLUSTER_DISTRIBUTED = CLUSTER_IS_LOCAL;<a name="line.146"></a>
+<span class="sourceLineNo">147</span><a name="line.147"></a>
+<span class="sourceLineNo">148</span>  /** default host address */<a name="line.148"></a>
+<span class="sourceLineNo">149</span>  public static final String DEFAULT_HOST = "0.0.0.0";<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>  /** Parameter name for port master listens on. */<a name="line.151"></a>
+<span class="sourceLineNo">152</span>  public static final String MASTER_PORT = "hbase.master.port";<a name="line.152"></a>
+<span class="sourceLineNo">153</span><a name="line.153"></a>
+<span class="sourceLineNo">154</span>  /** default port that the master listens on */<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  public static final int DEFAULT_MASTER_PORT = 16000;<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  /** default port for master web api */<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  public static final int DEFAULT_MASTER_INFOPORT = 16010;<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>  /** Configuration key for master web API port */<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  public static final String MASTER_INFO_PORT = "hbase.master.info.port";<a name="line.161"></a>
+<span class="sourceLineNo">162</span><a name="line.162"></a>
+<span class="sourceLineNo">163</span>  /** Parameter name for the master type being backup (waits for primary to go inactive). */<a name="line.163"></a>
+<span class="sourceLineNo">164</span>  public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span>  /**<a name="line.166"></a>
+<span class="sourceLineNo">167</span>   * by default every master is a possible primary master unless the conf explicitly overrides it<a name="line.167"></a>
+<span class="sourceLineNo">168</span>   */<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;<a name="line.169"></a>
+<span class="sourceLineNo">170</span><a name="line.170"></a>
+<span class="sourceLineNo">171</span>  /** Name of ZooKeeper quorum configuration parameter. */<a name="line.171"></a>
+<span class="sourceLineNo">172</span>  public static final String ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";<a name="line.172"></a>
+<span class="sourceLineNo">173</span><a name="line.173"></a>
+<span class="sourceLineNo">174</span>  /** Common prefix of ZooKeeper configuration properties */<a name="line.174"></a>
+<span class="sourceLineNo">175</span>  public static final String ZK_CFG_PROPERTY_PREFIX =<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      "hbase.zookeeper.property.";<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  public static final int ZK_CFG_PROPERTY_PREFIX_LEN =<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      ZK_CFG_PROPERTY_PREFIX.length();<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  /**<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   * The ZK client port key in the ZK properties map. The name reflects the<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   * fact that this is not an HBase configuration key.<a name="line.183"></a>
+<span class="sourceLineNo">184</span>   */<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  public static final String CLIENT_PORT_STR = "clientPort";<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>  /** Parameter name for the client port that the zookeeper listens on */<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  public static final String ZOOKEEPER_CLIENT_PORT =<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      ZK_CFG_PROPERTY_PREFIX + CLIENT_PORT_STR;<a name="line.189"></a>
+<span class="sourceLineNo">190</span><a name="line.190"></a>
+<span class="sourceLineNo">191</span>  /** Default client port that the zookeeper listens on */<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;<a name="line.192"></a>
+<span class="sourceLineNo">193</span><a name="line.193"></a>
+<span class="sourceLineNo">194</span>  /**<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * Parameter name for the wait time for the recoverable zookeeper<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   */<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      "hbase.zookeeper.recoverable.waittime";<a name="line.198"></a>
+<span class="sourceLineNo">199</span><a name="line.199"></a>
+<span class="sourceLineNo">200</span>  /** Default wait time for the recoverable zookeeper */<a name="line.200"></a>
+<span class="sourceLineNo">201</span>  public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>  /** Parameter name for the root dir in ZK for this cluster */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  public static final String ZOOKEEPER_ZNODE_PARENT = "zookeeper.znode.parent";<a name="line.204"></a>
 <span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Parameter name for the limit on concurrent client-side zookeeper<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * connections<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  public static final String ZOOKEEPER_MAX_CLIENT_CNXNS =<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      ZK_CFG_PROPERTY_PREFIX + "maxClientCnxns";<a name="line.211"></a>
-<span class="sourceLineNo">212</span><a name="line.212"></a>
-<span class="sourceLineNo">213</span>  /** Parameter name for the ZK data directory */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  public static final String ZOOKEEPER_DATA_DIR =<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      ZK_CFG_PROPERTY_PREFIX + "dataDir";<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  /** Parameter name for the ZK tick time */<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final String ZOOKEEPER_TICK_TIME =<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      ZK_CFG_PROPERTY_PREFIX + "tickTime";<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>  /** Default limit on concurrent client-side zookeeper connections */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  public static final int DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS = 300;<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /** Configuration key for ZooKeeper session timeout */<a name="line.224"></a>
-<span class="sourceLineNo">225</span>  public static final String ZK_SESSION_TIMEOUT = "zookeeper.session.timeout";<a name="line.225"></a>
-<span class="sourceLineNo">226</span><a name="line.226"></a>
-<span class="sourceLineNo">227</span>  /** Default value for ZooKeeper session timeout */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  public static final int DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>  /** Configuration key for whether to use ZK.multi */<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  public static final String ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>  /** Parameter name for port region server listens on. */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  public static final String REGIONSERVER_PORT = "hbase.regionserver.port";<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /** Default port region server listens on. */<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  public static final int DEFAULT_REGIONSERVER_PORT = 16020;<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>  /** default port for region server web api */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  public static final int DEFAULT_REGIONSERVER_INFOPORT = 16030;<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /** A configuration key for regionserver info port */<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  public static final String REGIONSERVER_INFO_PORT =<a name="line.243"></a>
-<span class="sourceLineNo">244</span>    "hbase.regionserver.info.port";<a name="line.244"></a>
-<span class="sourceLineNo">245</span><a name="line.245"></a>
-<span class="sourceLineNo">246</span>  /** A flag that enables automatic selection of regionserver info port */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static final String REGIONSERVER_INFO_PORT_AUTO =<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      REGIONSERVER_INFO_PORT + ".auto";<a name="line.248"></a>
-<span class="sourceLineNo">249</span><a name="line.249"></a>
-<span class="sourceLineNo">250</span>  /** Parameter name for what region server implementation to use. */<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public static final String REGION_SERVER_IMPL= "hbase.regionserver.impl";<a name="line.251"></a>
-<span class="sourceLineNo">252</span><a name="line.252"></a>
-<span class="sourceLineNo">253</span>  /** Parameter name for what master implementation to use. */<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  public static final String MASTER_IMPL= "hbase.master.impl";<a name="line.254"></a>
-<span class="sourceLineNo">255</span><a name="line.255"></a>
-<span class="sourceLineNo">256</span>  /** Parameter name for what hbase client implementation to use. */<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  public static final String HBASECLIENT_IMPL= "hbase.hbaseclient.impl";<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>  /** Parameter name for how often threads should wake up */<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  public static final String THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  /** Default value for thread wake frequency */<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  public static final int DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;<a name="line.263"></a>
-<span class="sourceLineNo">264</span><a name="line.264"></a>
-<span class="sourceLineNo">265</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>  public static final String VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /** Default number of times to try writing a version file before failing */<a name="line.268"></a>
-<span class="sourceLineNo">269</span>  public static final int DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>  /** Parameter name for how often a region should perform a major compaction */<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  public static final String MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>  /** Parameter name for the maximum batch of KVs to be used in flushes and compactions */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  public static final String COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max";<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  public static final int COMPACTION_KV_MAX_DEFAULT = 10;<a name="line.276"></a>
-<span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span>  /** Parameter name for HBase instance root directory */<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  public static final String HBASE_DIR = "hbase.rootdir";<a name="line.279"></a>
-<span class="sourceLineNo">280</span><a name="line.280"></a>
-<span class="sourceLineNo">281</span>  /** Parameter name for HBase client IPC pool type */<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  public static final String HBASE_CLIENT_IPC_POOL_TYPE = "hbase.client.ipc.pool.type";<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /** Parameter name for HBase client IPC pool size */<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  public static final String HBASE_CLIENT_IPC_POOL_SIZE = "hbase.client.ipc.pool.size";<a name="line.285"></a>
-<span class="sourceLineNo">286</span><a name="line.286"></a>
-<span class="sourceLineNo">287</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  public static final String HBASE_CLIENT_OPERATION_TIMEOUT = "hbase.client.operation.timeout";<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  /** Parameter name for HBase client meta operation timeout, which overrides RPC timeout */<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  public static final String HBASE_CLIENT_META_OPERATION_TIMEOUT =<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    "hbase.client.meta.operation.timeout";<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  /** Default HBase client operation timeout, which is tantamount to a blocking call */<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  public static final int DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT = 1200000;<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /** Used to construct the name of the log directory for a region server */<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  public static final String HREGION_LOGDIR_NAME = "WALs";<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>  /** Used to construct the name of the splitlog directory for a region server */<a name="line.300"></a>
-<span class="sourceLineNo">301</span>  public static final String SPLIT_LOGDIR_NAME = "splitWAL";<a name="line.301"></a>
-<span class="sourceLineNo">302</span><a name="line.302"></a>
-<span class="sourceLineNo">303</span>  /** Like the previous, but for old logs that are about to be deleted */<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  public static final String HREGION_OLDLOGDIR_NAME = "oldWALs";<a name="line.304"></a>
-<span class="sourceLineNo">305</span><a name="line.305"></a>
-<span class="sourceLineNo">306</span>  public static final String CORRUPT_DIR_NAME = "corrupt";<a name="line.306"></a>
+<span class="sourceLineNo">206</span>  public static final String DEFAULT_ZOOKEEPER_ZNODE_PARENT = "/hbase";<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>  /**<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   * Parameter name for the limit on concurrent client-side zookeeper<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   * connections<a name="line.210"></a>
+<span class="sourceLineNo">211</span>   */<a name="line.211"></a>
+<span class="sourceLineNo">212</span>  public static final String ZOOKEEPER_MAX_CLIENT_CNXNS =<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      ZK_CFG_PROPERTY_PREFIX + "maxClientCnxns";<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>  /** Parameter name for the ZK data directory */<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final String ZOOKEEPER_DATA_DIR =<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      ZK_CFG_PROPERTY_PREFIX + "dataDir";<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>  /** Parameter name for the ZK tick time */<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  public static final String ZOOKEEPER_TICK_TIME =<a name="line.220"></a>
+<span class="sourceLineNo">221</span>      ZK_CFG_PROPERTY_PREFIX + "tickTime";<a name="line.221"></a>
+<span class="sourceLineNo">222</span><a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /** Default limit on concurrent client-side zookeeper connections */<a name="line.223"></a>
+<span class="sourceLineNo">224</span>  public static final int DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS = 300;<a name="line.224"></a>
+<span class="sourceLineNo">225</span><a name="line.225"></a>
+<span class="sourceLineNo">226</span>  /** Configuration key for ZooKeeper session timeout */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  public static final String ZK_SESSION_TIMEOUT = "zookeeper.session.timeout";<a name="line.227"></a>
+<span class="sourceLineNo">228</span><a name="line.228"></a>
+<span class="sourceLineNo">229</span>  /** Default value for ZooKeeper session timeout */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  public static final int DEFAULT_ZK_SESSION_TIMEOUT = 180 * 1000;<a name="line.230"></a>
+<span class="sourceLineNo">231</span><a name="line.231"></a>
+<span class="sourceLineNo">232</span>  /** Configuration key for whether to use ZK.multi */<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  public static final String ZOOKEEPER_USEMULTI = "hbase.zookeeper.useMulti";<a name="line.233"></a>
+<span class="sourceLineNo">234</span><a name="line.234"></a>
+<span class="sourceLineNo">235</span>  /** Parameter name for port region server listens on. */<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  public static final String REGIONSERVER_PORT = "hbase.regionserver.port";<a name="line.236"></a>
+<span class="sourceLineNo">237</span><a name="line.237"></a>
+<span class="sourceLineNo">238</span>  /** Default port region server listens on. */<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  public static final int DEFAULT_REGIONSERVER_PORT = 16020;<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /** default port for region server web api */<a name="line.241"></a>
+<span class="sourceLineNo">242</span>  public static final int DEFAULT_REGIONSERVER_INFOPORT = 16030;<a name="line.242"></a>
+<span class="sourceLineNo">243</span><a name="line.243"></a>
+<span class="sourceLineNo">244</span>  /** A configuration key for regionserver info port */<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  public static final String REGIONSERVER_INFO_PORT =<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    "hbase.regionserver.info.port";<a name="line.246"></a>
+<span class="sourceLineNo">247</span><a name="line.247"></a>
+<span class="sourceLineNo">248</span>  /** A flag that enables automatic selection of regionserver info port */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  public static final String REGIONSERVER_INFO_PORT_AUTO =<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      REGIONSERVER_INFO_PORT + ".auto";<a name="line.250"></a>
+<span class="sourceLineNo">251</span><a name="line.251"></a>
+<span class="sourceLineNo">252</span>  /** Parameter name for what region server implementation to use. */<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  public static final String REGION_SERVER_IMPL= "hbase.regionserver.impl";<a name="line.253"></a>
+<span class="sourceLineNo">254</span><a name="line.254"></a>
+<span class="sourceLineNo">255</span>  /** Parameter name for what master implementation to use. */<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  public static final String MASTER_IMPL= "hbase.master.impl";<a name="line.256"></a>
+<span class="sourceLineNo">257</span><a name="line.257"></a>
+<span class="sourceLineNo">258</span>  /** Parameter name for what hbase client implementation to use. */<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  public static final String HBASECLIENT_IMPL= "hbase.hbaseclient.impl";<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  /** Parameter name for how often threads should wake up */<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  public static final String THREAD_WAKE_FREQUENCY = "hbase.server.thread.wakefrequency";<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>  /** Default value for thread wake frequency */<a name="line.264"></a>
+<span class="sourceLineNo">265</span>  public static final int DEFAULT_THREAD_WAKE_FREQUENCY = 10 * 1000;<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /** Parameter name for how often we should try to write a version file, before failing */<a name="line.267"></a>
+<span class="sourceLineNo">268</span>  public static final String VERSION_FILE_WRITE_ATTEMPTS = "hbase.server.versionfile.writeattempts";<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>  /** Default number of times to try writing a version file before failing */<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  public static final int DEFAULT_VERSION_FILE_WRITE_ATTEMPTS = 3;<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>  /** Parameter name for how often a region should perform a major compaction */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  public static final String MAJOR_COMPACTION_PERIOD = "hbase.hregion.majorcompaction";<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /** Parameter name for the maximum batch of KVs to be used in flushes and compactions */<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  public static final String COMPACTION_KV_MAX = "hbase.hstore.compaction.kv.max";<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  public static final int COMPACTION_KV_MAX_DEFAULT = 10;<a name="line.278"></a>
+<span class="sourceLineNo">279</span><a name="line.279"></a>
+<span class="sourceLineNo">280</span>  /** Parameter name for HBase instance root directory */<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  public static final String HBASE_DIR = "hbase.rootdir";<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>  /** Parameter name for HBase client IPC pool type */<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  public static final String HBASE_CLIENT_IPC_POOL_TYPE = "hbase.client.ipc.pool.type";<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  /** Parameter name for HBase client IPC pool size */<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  public static final String HBASE_CLIENT_IPC_POOL_SIZE = "hbase.client.ipc.pool.size";<a name="line.287"></a>
+<span class="sourceLineNo">288</span><a name="line.288"></a>
+<span class="sourceLineNo">289</span>  /** Parameter name for HBase client operation timeout, which overrides RPC timeout */<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public static final String HBASE_CLIENT_OPERATION_TIMEOUT = "hbase.client.operation.timeout";<a name="line.290"></a>
+<span class="sourceLineNo">291</span><a name="line.291"></a>
+<span class="sourceLineNo">292</span>  /** Parameter name for HBase client meta operation timeout, which overrides RPC timeout */<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  public static final String HBASE_CLIENT_META_OPERATION_TIMEOUT =<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    "hbase.client.meta.operation.timeout";<a name="line.294"></a>
+<span class="sourceLineNo">295</span><a name="line.295"></a>
+<span class="sourceLineNo">296</span>  /** Default HBase client operation timeout, which is tantamount to a blocking call */<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  public static final int DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT = 1200000;<a name="line.297"></a>
+<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">299</span>  /** Used to construct the name of the log directory for a region server */<a name="line.299"></a>
+<span class="sourceLineNo">300</span>  public static final String HREGION_LOGDIR_NAME = "WALs";<a name="line.300"></a>
+<span class="sourceLineNo">301</span><a name="line.301"></a>
+<span class="sourceLineNo">302</span>  /** Used to construct the name of the splitlog directory for a region server */<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  public static final String SPLIT_LOGDIR_NAME = "splitWAL";<a name="line.303"></a>
+<span class="sourceLineNo">304</span><a name="line.304"></a>
+<span class="sourceLineNo">305</span>  /** Like the previous, but for old logs that are about to be deleted */<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  public static final String HREGION_OLDLOGDIR_NAME = "oldWALs";<a name="line.306"></a>
 <span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  /** Used by HBCK to sideline backup data */<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  public static final String HBCK_SIDELINEDIR_NAME = ".hbck";<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>  /** Any artifacts left from migration can be moved here */<a name="line.311"></a>
-<span class="sourceLineNo">312</span>  public static final String MIGRATION_NAME = ".migration";<a name="line.312"></a>
-<span class="sourceLineNo">313</span><a name="line.313"></a>
-<span class="sourceLineNo">314</span>  /**<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * The directory from which co-processor/custom filter jars can be loaded<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * dynamically by the region servers. This value can be overridden by the<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   * hbase.dynamic.jars.dir config.<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  public static final String LIB_DIR = "lib";<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  /** Used to construct the name of the compaction directory during compaction */<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  public static final String HREGION_COMPACTIONDIR_NAME = "compaction.dir";<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /** Conf key for the max file size after which we split the region */<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  public static final String HREGION_MAX_FILESIZE =<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      "hbase.hregion.max.filesize";<a name="line.326"></a>
-<span class="sourceLineNo">327</span><a name="line.327"></a>
-<span class="sourceLineNo">328</span>  /** Default maximum file size */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  public static final long DEFAULT_MAX_FILE_SIZE = 10 * 1024 * 1024 * 1024L;<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  /**<a name="line.331"></a>
-<span class="sourceLineNo">332</span>   * Max size of single row for Get's or Scan's without in-row scanning flag set.<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   */<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  public static final String TABLE_MAX_ROWSIZE_KEY = "hbase.table.max.rowsize";<a name="line.334"></a>
-<span class="sourceLineNo">335</span><a name="line.335"></a>
-<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * Default max row size (1 Gb).<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   */<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  public static final long TABLE_MAX_ROWSIZE_DEFAULT = 1024 * 1024 * 1024L;<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * The max number of threads used for opening and closing stores or store<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * files in parallel<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  public static final String HSTORE_OPEN_AND_CLOSE_THREADS_MAX =<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    "hbase.hstore.open.and.close.threads.max";<a name="line.346"></a>
-<span class="sourceLineNo">347</span><a name="line.347"></a>
-<span class="sourceLineNo">348</span>  /**<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   * The default number for the max number of threads used for opening and<a name="line.349"></a>
-<span class="sourceLineNo">350</span>   * closing stores or store files in parallel<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   */<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  public static final int DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX = 1;<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>  /**<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * Block updates if memstore has hbase.hregion.memstore.block.multiplier<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   * times hbase.hregion.memstore.flush.size bytes.  Useful for preventing a<a name="line.356"></a>
-<span class="sourceLineNo">357</span>   * runaway memstore during spikes in update traffic.<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   */<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  public static final String HREGION_MEMSTORE_BLOCK_MULTIPLIER =<a name="line.359"></a>
-<span class="sourceLineNo">360</span>          "hbase.hregion.memstore.block.multiplier";<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * Default value for hbase.hregion.memstore.block.multiplier<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  public static final int DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER = 4;<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  /** Conf key for the memstore size at which we flush the memstore */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public static final String HREGION_MEMSTORE_FLUSH_SIZE =<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      "hbase.hregion.memstore.flush.size";<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>  public static final String HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      "hbase.hregion.edits.replay.skip.errors";<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  public static final boolean DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      false;<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  /** Maximum value length, enforced on KeyValue construction */<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public static final int MAXIMUM_VALUE_LENGTH = Integer.MAX_VALUE - 1;<a name="line.378"></a>
-<span class="sourceLineNo">379</span><a name="line.379"></a>
-<span class="sourceLineNo">380</span>  /** name of the file for unique cluster ID */<a name="line.380"></a>
-<span class="sourceLineNo">381</span>  public static final String CLUSTER_ID_FILE_NAME = "hbase.id";<a name="line.381"></a>
-<span class="sourceLineNo">382</span><a name="line.382"></a>
-<span class="sourceLineNo">383</span>  /** Default value for cluster ID */<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  public static final String CLUSTER_ID_DEFAULT = "default-cluster";<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /** Parameter name for # days to keep MVCC values during a major compaction */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  public static final String KEEP_SEQID_PERIOD = "hbase.hstore.compaction.keep.seqId.period";<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  /** At least to keep MVCC values in hfiles for 5 days */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static final int MIN_KEEP_SEQID_PERIOD = 5;<a name="line.389"></a>
-<span class="sourceLineNo">390</span><a name="line.390"></a>
-<span class="sourceLineNo">391</span>  // Always store the location of the root table's HRegion.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  // This HRegion is never split.<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>  // region name = table + startkey + regionid. This is the row key.<a name="line.394"></a>
-<span class="sourceLineNo">395</span>  // each row in the root and meta tables describes exactly 1 region<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  // Do we ever need to know all the information that we are storing?<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  // Note that the name of the root table starts with "-" and the name of the<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  // meta table starts with "." Why? it's a trick. It turns out that when we<a name="line.399"></a>
-<span class="sourceLineNo">400</span>  // store region names in memory, we use a SortedMap. Since "-" sorts before<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  // "." (and since no other table name can start with either of these<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  // characters), the root region will always be the first entry in such a Map,<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  // followed by all the meta regions (which will be ordered by their starting<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  // row key as well), followed by all user tables. So when the Master is<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  // choosing regions to assign, it will always choose the root region first,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  // followed by the meta regions, followed by user regions. Since the root<a name="line.406"></a>
-<span class="sourceLineNo">407</span>  // and meta regions always need to be on-line, this ensures that they will<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  // be the first to be reassigned if the server(s) they are being served by<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  // should go down.<a name="line.409"></a>
-<span class="sourceLineNo">410</span><a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>  /**<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * The hbase:meta table's name.<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @deprecated For upgrades of 0.94 to 0.96<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  @Deprecated  // for compat from 0.94 -&gt; 0.96.<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  public static final String BASE_NAMESPACE_DIR = "data";<a name="line.419"></a>
+<span class="sourceLineNo">308</span>  public static final String CORRUPT_DIR_NAME = "corrupt";<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  /** Used by HBCK to sideline backup data */<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  public static final String HBCK_SIDELINEDIR_NAME = ".hbck";<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /** Any artifacts left from migration can be moved here */<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  public static final String MIGRATION_NAME = ".migration";<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>  /**<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   * The directory from which co-processor/custom filter jars can be loaded<a name="line.317"></a>
+<span class="sourceLineNo">318</span>   * dynamically by the region servers. This value can be overridden by the<a name="line.318"></a>
+<span class="sourceLineNo">319</span>   * hbase.dynamic.jars.dir config.<a name="line.319"></a>
+<span class="sourceLineNo">320</span>   */<a name="line.320"></a>
+<span class="sourceLineNo">321</span>  public static final String LIB_DIR = "lib";<a name="line.321"></a>
+<span class="sourceLineNo">322</span><a name="line.322"></a>
+<span class="sourceLineNo">323</span>  /** Used to construct the name of the compaction directory during compaction */<a name="line.323"></a>
+<span class="sourceLineNo">324</span>  public static final String HREGION_COMPACTIONDIR_NAME = "compaction.dir";<a name="line.324"></a>
+<span class="sourceLineNo">325</span><a name="line.325"></a>
+<span class="sourceLineNo">326</span>  /** Conf key for the max file size after which we split the region */<a name="line.326"></a>
+<span class="sourceLineNo">327</span>  public static final String HREGION_MAX_FILESIZE =<a name="line.327"></a>
+<span class="sourceLineNo">328</span>      "hbase.hregion.max.filesize";<a name="line.328"></a>
+<span class="sourceLineNo">329</span><a name="line.329"></a>
+<span class="sourceLineNo">330</span>  /** Default maximum file size */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  public static final long DEFAULT_MAX_FILE_SIZE = 10 * 1024 * 1024 * 1024L;<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * Max size of single row for Get's or Scan's without in-row scanning flag set.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  public static final String TABLE_MAX_ROWSIZE_KEY = "hbase.table.max.rowsize";<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  /**<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   * Default max row size (1 Gb).<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   */<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  public static final long TABLE_MAX_ROWSIZE_DEFAULT = 1024 * 1024 * 1024L;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  /**<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * The max number of threads used for opening and closing stores or store<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   * files in parallel<a name="line.345"></a>
+<span class="sourceLineNo">346</span>   */<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  public static final String HSTORE_OPEN_AND_CLOSE_THREADS_MAX =<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    "hbase.hstore.open.and.close.threads.max";<a name="line.348"></a>
+<span class="sourceLineNo">349</span><a name="line.349"></a>
+<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   * The default number for the max number of threads used for opening and<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * closing stores or store files in parallel<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   */<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  public static final int DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX = 1;<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>  /**<a name="line.356"></a>
+<span class="sourceLineNo">357</span>   * Block updates if memstore has hbase.hregion.memstore.block.multiplier<a name="line.357"></a>
+<span class="sourceLineNo">358</span>   * times hbase.hregion.memstore.flush.size bytes.  Useful for preventing a<a name="line.358"></a>
+<span class="sourceLineNo">359</span>   * runaway memstore during spikes in update traffic.<a name="line.359"></a>
+<span class="sourceLineNo">360</span>   */<a name="line.360"></a>
+<span class="sourceLineNo">361</span>  public static final String HREGION_MEMSTORE_BLOCK_MULTIPLIER =<a name="line.361"></a>
+<span class="sourceLineNo">362</span>          "hbase.hregion.memstore.block.multiplier";<a name="line.362"></a>
+<span class="sourceLineNo">363</span><a name="line.363"></a>
+<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   * Default value for hbase.hregion.memstore.block.multiplier<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
+<span class="sourceLineNo">367</span>  public static final int DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER = 4;<a name="line.367"></a>
+<span class="sourceLineNo">368</span><a name="line.368"></a>
+<span class="sourceLineNo">369</span>  /** Conf key for the memstore size at which we flush the memstore */<a name="line.369"></a>
+<span class="sourceLineNo">370</span>  public static final String HREGION_MEMSTORE_FLUSH_SIZE =<a name="line.370"></a>
+<span class="sourceLineNo">371</span>      "hbase.hregion.memstore.flush.size";<a name="line.371"></a>
+<span class="sourceLineNo">372</span><a name="line.372"></a>
+<span class="sourceLineNo">373</span>  public static final String HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      "hbase.hregion.edits.replay.skip.errors";<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  public static final boolean DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS =<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      false;<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>  /** Maximum value length, enforced on KeyValue construction */<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  public static final int MAXIMUM_VALUE_LENGTH = Integer.MAX_VALUE - 1;<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /** name of the file for unique cluster ID */<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  public static final String CLUSTER_ID_FILE_NAME = "hbase.id";<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /** Default value for cluster ID */<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  public static final String CLUSTER_ID_DEFAULT = "default-cluster";<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /** Parameter name for # days to keep MVCC values during a major compaction */<a name="line.388"></a>
+<span class="sourceLineNo">389</span>  public static final String KEEP_SEQID_PERIOD = "hbase.hstore.compaction.keep.seqId.period";<a name="line.389"></a>
+<span class="sourceLineNo">390</span>  /** At least to keep MVCC values in hfiles for 5 days */<a name="line.390"></a>
+<span class="sourceLineNo">391</span>  public static final int MIN_KEEP_SEQID_PERIOD = 5;<a name="line.391"></a>
+<span class="sourceLineNo">392</span><a name="line.392"></a>
+<span class="sourceLineNo">393</span>  // Always store the location of the root table's HRegion.<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  // This HRegion is never split.<a name="line.394"></a>
+<span class="sourceLineNo">395</span><a name="line.395"></a>
+<span class="sourceLineNo">396</span>  // region name = table + startkey + regionid. This is the row key.<a name="line.396"></a>
+<span class="sourceLineNo">397</span>  // each row in the root and meta tables describes exactly 1 region<a name="line.397"></a>
+<span class="sourceLineNo">398</span>  // Do we ever need to know all the information that we are storing?<a name="line.398"></a>
+<span class="sourceLineNo">399</span><a name="line.399"></a>
+<span class="sourceLineNo">400</span>  // Note that the name of the root table starts with "-" and the name of the<a name="line.400"></a>
+<span class="sourceLineNo">401</span>  // meta table starts with "." Why? it's a trick. It turns out that when we<a name="line.401"></a>
+<span class="sourceLineNo">402</span>  // store region names in memory, we use a SortedMap. Since "-" sorts before<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  // "." (and since no other table name can start with either of these<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  // characters), the root region will always be the first entry in such a Map,<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  // followed by all the meta regions (which will be ordered by their starting<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  // row key as well), followed by all user tables. So when the Master is<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  // choosing regions to assign, it will always choose the root region first,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>  // followed by the meta regions, followed by user regions. Since the root<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  // and meta regions always need to be on-line, this ensures that they will<a name="line.409"></a>
+<span class="sourceLineNo">410</span>  // be the first to be reassigned if the server(s) they are being served by<a name="line.410"></a>
+<span class="sourceLineNo">411</span>  // should go down.<a name="line.411"></a>
+<span class="sourceLineNo">412</span><a name="line.412"></a>
+<span class="sourceLineNo">413</span><a name="line.413"></a>
+<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * The hbase:meta table's name.<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   * @deprecated For upgrades of 0.94 to 0.96<a name="line.416"></a>
+<span class="sourceLineNo">417</span>   */<a name="line.417"></a>
+<span class="sourceLineNo">418</span>  @Deprecated  // for compat from 0.94 -&gt; 0.96.<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();<a name="line.419"></a>
 <span class="sourceLineNo">420</span><a name="line.420"></a>
-<span class="sourceLineNo">421</span>  /** delimiter used between portions of a region name */<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  public static final int META_ROW_DELIMITER = ',';<a name="line.422"></a>
-<span class="sourceLineNo">423</span><a name="line.423"></a>
-<span class="sourceLineNo">424</span>  /** The catalog family as a string*/<a name="line.424"></a>
-<span class="sourceLineNo">425</span>  public static final String CATALOG_FAMILY_STR = "info";<a name="line.425"></a>
-<span class="sourceLineNo">426</span><a name="line.426"></a>
-<span class="sourceLineNo">427</span>  /** The catalog family */<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  public static final byte [] CATALOG_FAMILY = Bytes.toBytes(CATALOG_FAMILY_STR);<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /** The RegionInfo qualifier as a string */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>  public static final String REGIONINFO_QUALIFIER_STR = "regioninfo";<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>  /** The regioninfo column qualifier */<a name="line.433"></a>
-<span class="sourceLineNo">434</span>  public static final byte [] REGIONINFO_QUALIFIER = Bytes.toBytes(REGIONINFO_QUALIFIER_STR);<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>  /** The server column qualifier */<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  public static final String SERVER_QUALIFIER_STR = "server";<a name="line.437"></a>
+<span class="sourceLineNo">421</span>  public static final String BASE_NAMESPACE_DIR = "data";<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  /** delimiter used between portions of a region name */<a name="line.423"></a>
+<span class="sourceLineNo">424</span>  public static final int META_ROW_DELIMITER = ',';<a name="line.424"></a>
+<span class="sourceLineNo">425</span><a name="line.425"></a>
+<span class="sourceLineNo">426</span>  /** The catalog family as a string*/<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  public static final String CATALOG_FAMILY_STR = "info";<a name="line.427"></a>
+<span class="sourceLineNo">428</span><a name="line.428"></a>
+<span class="sourceLineNo">429</span>  /** The catalog family */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>  public static final byte [] CATALOG_FAMILY = Bytes.toBytes(CATALOG_FAMILY_STR);<a name="line.430"></a>
+<span class="sourceLineNo">431</span><a name="line.431"></a>
+<span class="sourceLineNo">432</span>  /** The RegionInfo qualifier as a string */<a name="line.432"></a>
+<span class="sourceLineNo">433</span>  public static final String REGIONINFO_QUALIFIER_STR = "regioninfo";<a name="line.433"></a>
+<span class="sourceLineNo">434</span><a name="line.434"></a>
+<span class="sourceLineNo">435</span>  /** The regioninfo column qualifier */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  public static final byte [] REGIONINFO_QUALIFIER = Bytes.toBytes(REGIONINFO_QUALIFIER_STR);<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
 <span class="sourceLineNo">438</span>  /** The server column qualifier */<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  public static final byte [] SERVER_QUALIFIER = Bytes.toBytes(SERVER_QUALIFIER_STR);<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /** The startcode column qualifier */<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  public static final String STARTCODE_QUALIFIER_STR = "serverstartcode";<a name="line.442"></a>
+<span class="sourceLineNo">439</span>  public static final String SERVER_QUALIFIER_STR = "server";<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  /** The server column qualifier */<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  public static final byte [] SERVER_QUALIFIER = Bytes.toBytes(SERVER_QUALIFIER_STR);<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
 <span class="sourceLineNo">443</span>  /** The startcode column qualifier */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  public static final byte [] STARTCODE_QUALIFIER = Bytes.toBytes(STARTCODE_QUALIFIER_STR);<a name="line.444"></a>
-<span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  /** The open seqnum column qualifier */<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  public static final String SEQNUM_QUALIFIER_STR = "seqnumDuringOpen";<a name="line.447"></a>
+<span class="sourceLineNo">444</span>  public static final String STARTCODE_QUALIFIER_STR = "serverstartcode";<a name="line.444"></a>
+<span class="sourceLineNo">445</span>  /** The startcode column qualifier */<a name="line.445"></a>
+<span class="sourceLineNo">446</span>  public static final byte [] STARTCODE_QUALIFIER = Bytes.toBytes(STARTCODE_QUALIFIER_STR);<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
 <span class="sourceLineNo">448</span>  /** The open seqnum column qualifier */<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  public static final byte [] SEQNUM_QUALIFIER = Bytes.toBytes(SEQNUM_QUALIFIER_STR);<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>  /** The state column qualifier */<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  public static final String STATE_QUALIFIER_STR = "state";<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static final byte [] STATE_QUALIFIER = Bytes.toBytes(STATE_QUALIFIER_STR);<a name="line.454"></a>
+<span class="sourceLineNo">449</span>  public static final String SEQNUM_QUALIFIER_STR = "seqnumDuringOpen";<a name="line.449"></a>
+<span class="sourceLineNo">450</span>  /** The open seqnum column qualifier */<a name="line.450"></a>
+<span class="sourceLineNo">451</span>  public static final byte [] SEQNUM_QUALIFIER = Bytes.toBytes(SEQNUM_QUALIFIER_STR);<a name="line.451"></a>
+<span class="sourceLineNo">452</span><a name="line.452"></a>
+<span class="sourceLineNo">453</span>  /** The state column qualifier */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  public static final String STATE_QUALIFIER_STR = "state";<a name="line.454"></a>
 <span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
-<span class="sourceLineNo">457</span>   * The serverName column qualifier. It's the server where the region is<a name="line.457"></a>
-<span class="sourceLineNo">458</span>   * transitioning on, while column server is the server where the region is<a name="line.458"></a>
-<span class="sourceLineNo">459</span>   * opened on. They are the same when the region is in state OPEN.<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   */<a name="line.460"></a>
-<span class="sourceLineNo">461</span>  public static final String SERVERNAME_QUALIFIER_STR = "sn";<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>  public static final byte [] SERVERNAME_QUALIFIER = Bytes.toBytes(SERVERNAME_QUALIFIER_STR);<a name="line.463"></a>
+<span class="sourceLineNo">456</span>  public static final byte [] STATE_QUALIFIER = Bytes.toBytes(STATE_QUALIFIER_STR);<a name="line.456"></a>
+<span class="sourceLineNo">457</span><a name="line.457"></a>
+<span class="sourceLineNo">458</span>  /**<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   * The serverName column qualifier. It's the server where the region is<a name="line.459"></a>
+<span class="sourceLineNo">460</span>   * transitioning on, while column server is the server where the region is<a name="line.460"></a>
+<span class="sourceLineNo">461</span>   * opened on. They are the same when the region is in state OPEN.<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
+<span class="sourceLineNo">463</span>  public static final String SERVERNAME_QUALIFIER_STR = "sn";<a name="line.463"></a>
 <span class="sourceLineNo">464</span><a name="line.464"></a>
-<span class="sourceLineNo">465</span>  /** The lower-half split region column qualifier */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>  public static final byte [] SPLITA_QUALIFIER = Bytes.toBytes("splitA");<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>  /** The upper-half split region column qualifier */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static final byte [] SPLITB_QUALIFIER = Bytes.toBytes("splitB");<a name="line.469"></a>
-<span class="sourceLineNo">470</span><a name="line.470"></a>
-<span class="sourceLineNo">471</span>  /** The lower-half merge region column qualifier */<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  public static final byte[] MERGEA_QUALIFIER = Bytes.toBytes("mergeA");<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  /** The upper-half merge region column qualifier */<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  public static final byte[] MERGEB_QUALIFIER = Bytes.toBytes("mergeB");<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  /** The catalog family as a string*/<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  public static final String TABLE_FAMILY_STR = "table";<a name="line.478"></a>
-<span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>  /** The catalog family */<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  public static final byte [] TABLE_FAMILY = Bytes.toBytes(TABLE_FAMILY_STR);<a name="line.481"></a>
-<span class="sourceLineNo">482</span><a name="line.482"></a>
-<span class="sourceLineNo">483</span>  /** The serialized table state qualifier */<a name="line.483"></a>
-<span class="sourceLineNo">484</span>  public static final byte[] TABLE_STATE_QUALIFIER = Bytes.toBytes("state");<a name="line.484"></a>
-<span class="sourceLineNo">485</span><a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>  /**<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * The meta table version column qualifier.<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * We keep current version of the meta table in this column in &lt;code&gt;-ROOT-&lt;/code&gt;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * table: i.e. in the 'info:v' column.<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   */<a name="line.491"></a>
-<span class="sourceLineNo">492</span>  public static final byte [] META_VERSION_QUALIFIER = Bytes.toBytes("v");<a name="line.492"></a>
-<span class="sourceLineNo">493</span><a name="line.493"></a>
-<span class="sourceLineNo">494</span>  /**<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * The current version of the meta table.<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * - pre-hbase 0.92.  There is no META_VERSION column in the root table<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * in this case. The meta has HTableDescriptor serialized into the HRegionInfo;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * - version 0 is 0.92 and 0.94. Meta data has serialized HRegionInfo's using<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * Writable serialization, and HRegionInfo's does not contain HTableDescriptors.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * - version 1 for 0.96+ keeps HRegionInfo data structures, but changes the<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   * byte[] serialization from Writables to Protobuf.<a name="line.501"></a>
-<span class="sourceLineNo">502</span>   * See HRegionInfo.VERSION<a name="line.502"></a>
-<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  public static final short META_VERSION = 1;<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  // Other constants<a name="line.506"></a>
+<span class="sourceLineNo">465</span>  public static final byte [] SERVERNAME_QUALIFIER = Bytes.toBytes(SERVERNAME_QUALIFIER_STR);<a name="line.465"></a>
+<span class="sourceLineNo">466</span><a name="line.466"></a>
+<span class="sourceLineNo">467</span>  /** The lower-half split region column qualifier */<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  public static final byte [] SPLITA_QUALIFIER = Bytes.toBytes("splitA");<a name="line.468"></a>
+<span class="sourceLineNo">469</span><a name="line.469"></a>
+<span class="sourceLineNo">470</span>  /** The upper-half split region column qualifier */<a name="line.470"></a>
+<span class="sourceLineNo">471</span>  public static final byte [] SPLITB_QUALIFIER = Bytes.toBytes("splitB");<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  /** The lower-half merge region column qualifier */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  public static final byte[] MERGEA_QUALIFIER = Bytes.toBytes("mergeA");<a name="line.474"></a>
+<span class="sourceLineNo">475</span><a name="line.475"></a>
+<span class="sourceLineNo">476</span>  /** The upper-half merge region column qualifier */<a name="line.476"></a>
+<span class="sourceLineNo">477</span>  public static final byte[] MERGEB_QUALIFIER = Bytes.toBytes("mergeB");<a name="line.477"></a>
+<span class="sourceLineNo">478</span><a name="line.478"></a>
+<span class="sourceLineNo">479</span>  /** The catalog family as a string*/<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  public static final String TABLE_FAMILY_STR = "table";<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>  /** The catalog family */<a name="line.482"></a>
+<span class="sourceLineNo">483</span>  public static final byte [] TABLE_FAMILY = Bytes.toBytes(TABLE_FAMILY_STR);<a name="line.483"></a>
+<span class="sourceLineNo">484</span><a name="line.484"></a>
+<span class="sourceLineNo">485</span>  /** The serialized table state qualifier */<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  public static final byte[] TABLE_STATE_QUALIFIER = Bytes.toBytes("state");<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span><a name="line.488"></a>
+<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   * The meta table version column qualifier.<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * We keep current version of the meta table in this column in &lt;code&gt;-ROOT-&lt;/code&gt;<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * table: i.e. in the 'info:v' column.<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static final byte [] META_VERSION_QUALIFIER = Bytes.toBytes("v");<a name="line.494"></a>
+<span class="sourceLineNo">495</span><a name="line.495"></a>
+<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
+<span class="sourceLineNo">497</span>   * The current version of the meta table.<a name="line.497"></a>
+<span class="sourceLineNo">498</span>   * - pre-hbase 0.92.  There is no META_VERSION column in the root table<a name="line.498"></a>
+<span class="sourceLineNo">499</span>   * in this case. The meta has HTableDescriptor serialized into the HRegionInfo;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>   * - version 0 is 0.92 and 0.94. Meta data has serialized HRegionInfo's using<a name="line.500"></a>
+<span class="sourceLineNo">501</span>   * Writable serialization, and HRegionInfo's does not contain HTableDescriptors.<a name="line.501"></a>
+<span class="sourceLineNo">502</span>   * - version 1 for 0.96+ keeps HRegionInfo data structures, but changes the<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   * byte[] serialization from Writables to Protobuf.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * See HRegionInfo.VERSION<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   */<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  public static final short META_VERSION = 1;<a name="line.506"></a>
 <span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>  /**<a name="line.508"></a>

<TRUNCATED>
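
(The truncated hunk above shifts the catalog-table constants -- SPLITA_QUALIFIER, SPLITB_QUALIFIER, MERGEA/MERGEB, TABLE_FAMILY, META_VERSION and friends -- down by roughly forty source lines. For readers following the renumbering, the sketch below shows the usual way those qualifiers are consumed: reading the daughter-region columns of one hbase:meta row. It is a minimal sketch only, assuming an HBase 1.x client on the classpath; the row key is illustrative and not taken from this commit.)

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class MetaSplitColumns {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table meta = connection.getTable(TableName.META_TABLE_NAME)) {
      // Hypothetical meta row key of a parent region; substitute a real one.
      byte[] parentRegionRow = Bytes.toBytes("t1,,1457000000000.0123456789abcdef.");
      Get get = new Get(parentRegionRow);
      // info:splitA and info:splitB hold the daughter regions after a split.
      get.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
      get.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
      Result result = meta.get(get);
      byte[] splitA = result.getValue(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
      byte[] splitB = result.getValue(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
      System.out.println("splitA present: " + (splitA != null));
      System.out.println("splitB present: " + (splitB != null));
    }
  }
}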

[41/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html
index 682e29c..6db3829 100644
--- a/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html
+++ b/devapidocs/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html
@@ -100,7 +100,7 @@
 <hr>
 <br>
 <pre><a href="../../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Private</a>
-public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.50">PrefixTreeSeeker</a>
+public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.49">PrefixTreeSeeker</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html" title="interface in org.apache.hadoop.hbase.io.encoding">DataBlockEncoder.EncodedSeeker</a></pre>
 <div class="block">These methods have the same definition as any implementation of the EncodedSeeker.
@@ -313,7 +313,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>block</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.52">block</a></pre>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.51">block</a></pre>
 </li>
 </ul>
 <a name="includeMvccVersion">
@@ -322,7 +322,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>includeMvccVersion</h4>
-<pre>protected&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.53">includeMvccVersion</a></pre>
+<pre>protected&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.52">includeMvccVersion</a></pre>
 </li>
 </ul>
 <a name="ptSearcher">
@@ -331,7 +331,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>ptSearcher</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.html" title="class in org.apache.hadoop.hbase.codec.prefixtree.decode">PrefixTreeArraySearcher</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.54">ptSearcher</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.html" title="class in org.apache.hadoop.hbase.codec.prefixtree.decode">PrefixTreeArraySearcher</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.53">ptSearcher</a></pre>
 </li>
 </ul>
 <a name="USE_POSITION_BEFORE">
@@ -340,7 +340,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>USE_POSITION_BEFORE</h4>
-<pre>private static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.149">USE_POSITION_BEFORE</a></pre>
+<pre>private static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.148">USE_POSITION_BEFORE</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeSeeker.USE_POSITION_BEFORE">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -358,7 +358,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>PrefixTreeSeeker</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.56">PrefixTreeSeeker</a>(boolean&nbsp;includeMvccVersion)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.55">PrefixTreeSeeker</a>(boolean&nbsp;includeMvccVersion)</pre>
 </li>
 </ul>
 </li>
@@ -375,7 +375,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>setCurrentBuffer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.61">setCurrentBuffer</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;fullBlockBuffer)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.60">setCurrentBuffer</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;fullBlockBuffer)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#setCurrentBuffer(org.apache.hadoop.hbase.nio.ByteBuff)">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Set on which buffer there will be done seeking.</div>
 <dl>
@@ -390,7 +390,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>releaseCurrentSearcher</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.73">releaseCurrentSearcher</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.72">releaseCurrentSearcher</a>()</pre>
 <div class="block"><p>
  Currently unused.
  </p>
@@ -404,7 +404,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>getKey</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.79">getKey</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.78">getKey</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#getKey()">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">From the current position creates a cell using the key part
  of the current buffer</div>
@@ -420,7 +420,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueShallowCopy</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.85">getValueShallowCopy</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.84">getValueShallowCopy</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#getValueShallowCopy()">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Does a shallow copy of the value at the current position. A shallow
  copy is possible because the returned buffer refers to the backing array
@@ -437,7 +437,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>getCell</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.93">getCell</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.92">getCell</a>()</pre>
 <div class="block">currently must do deep copy into new array</div>
 <dl>
 <dt><strong>Specified by:</strong></dt>
@@ -451,7 +451,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>get</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.130">get</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.129">get</a>()</pre>
 <div class="block"><p>
  Currently unused.
  </p><p>
@@ -469,7 +469,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>rewind</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.135">rewind</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.134">rewind</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#rewind()">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Set position to beginning of given block</div>
 <dl>
@@ -484,7 +484,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>next</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.140">next</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.139">next</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#next()">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Move to next position</div>
 <dl>
@@ -499,7 +499,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>advance</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.144">advance</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.143">advance</a>()</pre>
 </li>
 </ul>
 <a name="seekToOrBeforeUsingPositionAtOrBefore(org.apache.hadoop.hbase.Cell, boolean)">
@@ -508,7 +508,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>seekToOrBeforeUsingPositionAtOrBefore</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.156">seekToOrBeforeUsingPositionAtOrBefore</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;kv,
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.155">seekToOrBeforeUsingPositionAtOrBefore</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;kv,
                                         boolean&nbsp;seekBefore)</pre>
 </li>
 </ul>
@@ -518,7 +518,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>seekToOrBeforeUsingPositionAtOrAfter</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.172">seekToOrBeforeUsingPositionAtOrAfter</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;kv,
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.171">seekToOrBeforeUsingPositionAtOrAfter</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;kv,
                                        boolean&nbsp;seekBefore)</pre>
 </li>
 </ul>
@@ -528,7 +528,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>seekToKeyInBlock</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.203">seekToKeyInBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key,
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.202">seekToKeyInBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key,
                    boolean&nbsp;forceBeforeOnExactMatch)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#seekToKeyInBlock(org.apache.hadoop.hbase.Cell,%20boolean)">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Moves the seeker position within the current block to:
@@ -554,7 +554,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>compareKey</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.212">compareKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html#line.211">compareKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
              <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#compareKey(org.apache.hadoop.hbase.CellComparator,%20org.apache.hadoop.hbase.Cell)">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Compare the given key against the current key</div>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 798bb36..0a4b032 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -161,14 +161,14 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">FuzzyRowFilter.SatisfiesCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">Filter.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">FilterList.Operator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">BitComparator.BitwiseOp</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">FuzzyRowFilter.Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">CompareFilter.CompareOp</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">Filter.ReturnCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">FilterWrapper.FilterRowRetCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">BitComparator.BitwiseOp</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">FuzzyRowFilter.SatisfiesCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">RegexStringComparator.EngineType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">FilterList.Operator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="strong">CompareFilter.CompareOp</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/class-use/HeapSize.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/class-use/HeapSize.html b/devapidocs/org/apache/hadoop/hbase/io/class-use/HeapSize.html
index 97b340c..34d852d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/class-use/HeapSize.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/class-use/HeapSize.html
@@ -343,7 +343,7 @@
 <tr class="altColor">
 <td class="colFirst"><code>class&nbsp;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></strong></code>
-<div class="block">Reading <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and 2 blocks, and writing version 2 blocks.</div>
+<div class="block">Reads <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version 1 and version 2 blocks but writes version 2 blocks only.</div>
 </td>
 </tr>
 <tr class="rowColor">
@@ -429,7 +429,7 @@
 <td class="colFirst"><code>class&nbsp;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></strong></code>
 <div class="block">BucketCache uses <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketAllocator</code></a> to allocate/free blocks, and uses
- <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ramCache"><code>BucketCache.ramCache</code></a> and <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#backingMap"><code>BucketCache.backingMap</code></a> in order to
+ BucketCache#ramCache and BucketCache#backingMap in order to
  determine if a given element is in the cache.</div>
 </td>
 </tr>
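
(One wording change here besides the anchor shifts: the BucketCache summary now spells out that cache membership is decided by consulting ramCache -- blocks still queued for write-out -- and then backingMap -- blocks already placed by the BucketAllocator. As an analogy only, not the BucketCache implementation, a two-map containment check looks like the standalone sketch below; RamEntry, BackedEntry and the key format are invented for illustration.)

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class TwoLevelPresenceCheck {
  static final class RamEntry { final byte[] payload; RamEntry(byte[] p) { payload = p; } }
  static final class BackedEntry { final long offset; final int length;
    BackedEntry(long o, int l) { offset = o; length = l; } }

  // Blocks waiting to be written out to the bucket storage.
  private final ConcurrentMap<String, RamEntry> ramCache = new ConcurrentHashMap<>();
  // Blocks already allocated a slot; only offset and length are kept on heap.
  private final ConcurrentMap<String, BackedEntry> backingMap = new ConcurrentHashMap<>();

  boolean containsBlock(String cacheKey) {
    // A block counts as cached if it is present in either map.
    return ramCache.containsKey(cacheKey) || backingMap.containsKey(cacheKey);
  }

  public static void main(String[] args) {
    TwoLevelPresenceCheck cache = new TwoLevelPresenceCheck();
    cache.ramCache.put("hfile-1#0", new RamEntry(new byte[]{1, 2, 3}));
    cache.backingMap.put("hfile-1#65536", new BackedEntry(65536L, 4096));
    System.out.println(cache.containsBlock("hfile-1#0"));      // true (still in RAM queue)
    System.out.println(cache.containsBlock("hfile-1#65536"));  // true (already backed)
    System.out.println(cache.containsBlock("hfile-2#0"));      // false
  }
}
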

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
index e491858..260ef48 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html
@@ -104,7 +104,7 @@
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1120">BufferedDataBlockEncoder.BufferedDataBlockEncodingState</a>
+<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.1122">BufferedDataBlockEncoder.BufferedDataBlockEncodingState</a>
 extends <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/EncodingState.html" title="class in org.apache.hadoop.hbase.io.encoding">EncodingState</a></pre>
 </li>
 </ul>
@@ -190,7 +190,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/EncodingS
 <ul class="blockListLast">
 <li class="blockList">
 <h4>unencodedDataSizeWritten</h4>
-<pre>int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html#line.1121">unencodedDataSizeWritten</a></pre>
+<pre>int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html#line.1123">unencodedDataSizeWritten</a></pre>
 </li>
 </ul>
 </li>
@@ -207,7 +207,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/EncodingS
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BufferedDataBlockEncoder.BufferedDataBlockEncodingState</h4>
-<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html#line.1120">BufferedDataBlockEncoder.BufferedDataBlockEncodingState</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedDataBlockEncodingState.html#line.1122">BufferedDataBlockEncoder.BufferedDataBlockEncodingState</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
index 3f1acff..73fbcdc 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html
@@ -103,7 +103,7 @@
 </dl>
 <hr>
 <br>
-<pre>protected abstract static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.689">BufferedDataBlockEncoder.BufferedEncodedSeeker</a>&lt;STATE extends <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html" title="class in org.apache.hadoop.hbase.io.encoding">BufferedDataBlockEncoder.SeekerState</a>&gt;
+<pre>protected abstract static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html#line.691">BufferedDataBlockEncoder.BufferedEncodedSeeker</a>&lt;STATE extends <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html" title="class in org.apache.hadoop.hbase.io.encoding">BufferedDataBlockEncoder.SeekerState</a>&gt;
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html" title="interface in org.apache.hadoop.hbase.io.encoding">DataBlockEncoder.EncodedSeeker</a></pre>
 </li>
@@ -322,7 +322,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>decodingCtx</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.692">decodingCtx</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.694">decodingCtx</a></pre>
 </li>
 </ul>
 <a name="comparator">
@@ -331,7 +331,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>comparator</h4>
-<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.693">comparator</a></pre>
+<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.695">comparator</a></pre>
 </li>
 </ul>
 <a name="currentBuffer">
@@ -340,7 +340,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>currentBuffer</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.694">currentBuffer</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.696">currentBuffer</a></pre>
 </li>
 </ul>
 <a name="tagCompressionContext">
@@ -349,7 +349,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>tagCompressionContext</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/TagCompressionContext.html" title="class in org.apache.hadoop.hbase.io">TagCompressionContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.695">tagCompressionContext</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/TagCompressionContext.html" title="class in org.apache.hadoop.hbase.io">TagCompressionContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.697">tagCompressionContext</a></pre>
 </li>
 </ul>
 <a name="keyOnlyKV">
@@ -358,7 +358,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>keyOnlyKV</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html" title="class in org.apache.hadoop.hbase">KeyValue.KeyOnlyKeyValue</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.696">keyOnlyKV</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html" title="class in org.apache.hadoop.hbase">KeyValue.KeyOnlyKeyValue</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.698">keyOnlyKV</a></pre>
 </li>
 </ul>
 <a name="tmpPair">
@@ -367,7 +367,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>tmpPair</h4>
-<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/util/ObjectIntPair.html" title="class in org.apache.hadoop.hbase.util">ObjectIntPair</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.699">tmpPair</a></pre>
+<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/util/ObjectIntPair.html" title="class in org.apache.hadoop.hbase.util">ObjectIntPair</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.701">tmpPair</a></pre>
 </li>
 </ul>
 <a name="current">
@@ -376,7 +376,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>current</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html" title="type parameter in BufferedDataBlockEncoder.BufferedEncodedSeeker">STATE</a> extends <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html" title="class in org.apache.hadoop.hbase.io.encoding">BufferedDataBlockEncoder.SeekerState</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.700">current</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html" title="type parameter in BufferedDataBlockEncoder.BufferedEncodedSeeker">STATE</a> extends <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html" title="class in org.apache.hadoop.hbase.io.encoding">BufferedDataBlockEncoder.SeekerState</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.702">current</a></pre>
 </li>
 </ul>
 <a name="previous">
@@ -385,7 +385,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>previous</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html" title="type parameter in BufferedDataBlockEncoder.BufferedEncodedSeeker">STATE</a> extends <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html" title="class in org.apache.hadoop.hbase.io.encoding">BufferedDataBlockEncoder.SeekerState</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.700">previous</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html" title="type parameter in BufferedDataBlockEncoder.BufferedEncodedSeeker">STATE</a> extends <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html" title="class in org.apache.hadoop.hbase.io.encoding">BufferedDataBlockEncoder.SeekerState</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.702">previous</a></pre>
 </li>
 </ul>
 </li>
@@ -402,7 +402,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BufferedDataBlockEncoder.BufferedEncodedSeeker</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.702">BufferedDataBlockEncoder.BufferedEncodedSeeker</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.704">BufferedDataBlockEncoder.BufferedEncodedSeeker</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
                                               <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;decodingCtx)</pre>
 </li>
 </ul>
@@ -420,7 +420,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>includesMvcc</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.717">includesMvcc</a>()</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.719">includesMvcc</a>()</pre>
 </li>
 </ul>
 <a name="includesTags()">
@@ -429,7 +429,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>includesTags</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.721">includesTags</a>()</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.723">includesTags</a>()</pre>
 </li>
 </ul>
 <a name="compareKey(org.apache.hadoop.hbase.CellComparator, org.apache.hadoop.hbase.Cell)">
@@ -438,7 +438,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>compareKey</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.726">compareKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.728">compareKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
              <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#compareKey(org.apache.hadoop.hbase.CellComparator,%20org.apache.hadoop.hbase.Cell)">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Compare the given key against the current key</div>
@@ -454,7 +454,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>setCurrentBuffer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.732">setCurrentBuffer</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buffer)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.734">setCurrentBuffer</a>(<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;buffer)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#setCurrentBuffer(org.apache.hadoop.hbase.nio.ByteBuff)">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Set on which buffer there will be done seeking.</div>
 <dl>
@@ -469,7 +469,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>getKey</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.747">getKey</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.749">getKey</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#getKey()">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">From the current position creates a cell using the key part
  of the current buffer</div>
@@ -485,7 +485,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueShallowCopy</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.754">getValueShallowCopy</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.756">getValueShallowCopy</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#getValueShallowCopy()">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Does a shallow copy of the value at the current position. A shallow
  copy is possible because the returned buffer refers to the backing array
@@ -502,7 +502,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>getCell</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.763">getCell</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.765">getCell</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#getCell()">getCell</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html" title="interface in org.apache.hadoop.hbase.io.encoding">DataBlockEncoder.EncodedSeeker</a></code></dd>
@@ -515,7 +515,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>rewind</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.768">rewind</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.770">rewind</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#rewind()">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Set position to beginning of given block</div>
 <dl>
@@ -530,7 +530,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>next</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.779">next</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.781">next</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#next()">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Move to next position</div>
 <dl>
@@ -545,7 +545,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>decodeTags</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.789">decodeTags</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.791">decodeTags</a>()</pre>
 </li>
 </ul>
 <a name="seekToKeyInBlock(org.apache.hadoop.hbase.Cell, boolean)">
@@ -554,7 +554,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>seekToKeyInBlock</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.815">seekToKeyInBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;seekCell,
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.817">seekToKeyInBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;seekCell,
                    boolean&nbsp;seekBefore)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#seekToKeyInBlock(org.apache.hadoop.hbase.Cell,%20boolean)">DataBlockEncoder.EncodedSeeker</a></code></strong></div>
 <div class="block">Moves the seeker position within the current block to:
@@ -580,7 +580,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>compareTypeBytes</h4>
-<pre>private&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.914">compareTypeBytes</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key,
+<pre>private&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.916">compareTypeBytes</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key,
                    <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;right)</pre>
 </li>
 </ul>
@@ -590,7 +590,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>findCommonPrefixInRowPart</h4>
-<pre>private static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.927">findCommonPrefixInRowPart</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
+<pre>private static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.929">findCommonPrefixInRowPart</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
                             <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;right,
                             int&nbsp;rowCommonPrefix)</pre>
 </li>
@@ -601,7 +601,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>findCommonPrefixInFamilyPart</h4>
-<pre>private static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.933">findCommonPrefixInFamilyPart</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
+<pre>private static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.935">findCommonPrefixInFamilyPart</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
                                <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;right,
                                int&nbsp;familyCommonPrefix)</pre>
 </li>
@@ -612,7 +612,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>findCommonPrefixInQualifierPart</h4>
-<pre>private static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.941">findCommonPrefixInQualifierPart</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
+<pre>private static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.943">findCommonPrefixInQualifierPart</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
                                   <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;right,
                                   int&nbsp;qualifierCommonPrefix)</pre>
 </li>
@@ -623,7 +623,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>moveToPrevious</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.949">moveToPrevious</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.951">moveToPrevious</a>()</pre>
 </li>
 </ul>
 <a name="createSeekerState()">
@@ -632,7 +632,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>createSeekerState</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html" title="type parameter in BufferedDataBlockEncoder.BufferedEncodedSeeker">STATE</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.976">createSeekerState</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html" title="type parameter in BufferedDataBlockEncoder.BufferedEncodedSeeker">STATE</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.978">createSeekerState</a>()</pre>
 </li>
 </ul>
 <a name="decodeFirst()">
@@ -641,7 +641,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockList">
 <li class="blockList">
 <h4>decodeFirst</h4>
-<pre>protected abstract&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.982">decodeFirst</a>()</pre>
+<pre>protected abstract&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.984">decodeFirst</a>()</pre>
 </li>
 </ul>
 <a name="decodeNext()">
@@ -650,7 +650,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>decodeNext</h4>
-<pre>protected abstract&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.983">decodeNext</a>()</pre>
+<pre>protected abstract&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.BufferedEncodedSeeker.html#line.985">decodeNext</a>()</pre>
 </li>
 </ul>
 </li>
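
(The BufferedEncodedSeeker hunk is again only anchor renumbering, but it does show the shape of the class: the concrete rewind()/next() entry points delegate to abstract decodeFirst()/decodeNext() hooks that each encoding supplies. The standalone sketch below illustrates that template-method shape with invented names and simplified signatures -- in the hunk above the real decodeNext() is void and end-of-block is detected from the buffer instead.)

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public abstract class TemplateSeeker {
  /** Position on the first entry of the current block. */
  protected abstract void decodeFirst();
  /** Advance to the next entry; return false at the end of the block. */
  protected abstract boolean decodeNext();

  public void rewind() { decodeFirst(); }           // public entry point, like rewind()
  public boolean next() { return decodeNext(); }    // public entry point, like next()

  /** Toy subclass: "decodes" from an in-memory list instead of an encoded block. */
  static final class ListSeeker extends TemplateSeeker {
    private final List<String> entries;
    private Iterator<String> it;
    String current;

    ListSeeker(List<String> entries) { this.entries = entries; }

    @Override protected void decodeFirst() { it = entries.iterator(); current = it.next(); }
    @Override protected boolean decodeNext() {
      if (!it.hasNext()) { return false; }
      current = it.next();
      return true;
    }
  }

  public static void main(String[] args) {
    ListSeeker seeker = new ListSeeker(Arrays.asList("k1", "k2", "k3"));
    seeker.rewind();
    do {
      System.out.println(seeker.current);
    } while (seeker.next());                         // prints k1, k2, k3
  }
}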


[31/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
index 8f7fcf6..6631f59 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
@@ -103,7 +103,7 @@
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1130">BucketCache.BucketEntry</a>
+<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1134">BucketCache.BucketEntry</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a></pre>
 <div class="block">Item in cache. We expect this to be where most memory goes. Java uses 8
@@ -274,7 +274,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1131">serialVersionUID</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1135">serialVersionUID</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry.serialVersionUID">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -284,7 +284,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>COMPARATOR</h4>
-<pre>static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1134">COMPARATOR</a></pre>
+<pre>static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1138">COMPARATOR</a></pre>
 </li>
 </ul>
 <a name="offsetBase">
@@ -293,7 +293,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>offsetBase</h4>
-<pre>private&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1144">offsetBase</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1148">offsetBase</a></pre>
 </li>
 </ul>
 <a name="length">
@@ -302,7 +302,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>length</h4>
-<pre>private&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1145">length</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1149">length</a></pre>
 </li>
 </ul>
 <a name="offset1">
@@ -311,7 +311,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>offset1</h4>
-<pre>private&nbsp;byte <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1146">offset1</a></pre>
+<pre>private&nbsp;byte <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1150">offset1</a></pre>
 </li>
 </ul>
 <a name="deserialiserIndex">
@@ -320,7 +320,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>deserialiserIndex</h4>
-<pre>byte <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1147">deserialiserIndex</a></pre>
+<pre>byte <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1151">deserialiserIndex</a></pre>
 </li>
 </ul>
 <a name="accessCounter">
@@ -329,7 +329,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>accessCounter</h4>
-<pre>private volatile&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1148">accessCounter</a></pre>
+<pre>private volatile&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1152">accessCounter</a></pre>
 </li>
 </ul>
 <a name="priority">
@@ -338,7 +338,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>priority</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1149">priority</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1153">priority</a></pre>
 </li>
 </ul>
 <a name="markedForEvict">
@@ -347,7 +347,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>markedForEvict</h4>
-<pre>private volatile&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1151">markedForEvict</a></pre>
+<pre>private volatile&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1155">markedForEvict</a></pre>
 </li>
 </ul>
 <a name="refCount">
@@ -356,7 +356,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>refCount</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1152">refCount</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1156">refCount</a></pre>
 </li>
 </ul>
 <a name="cachedTime">
@@ -365,7 +365,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>cachedTime</h4>
-<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1157">cachedTime</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1161">cachedTime</a></pre>
 <div class="block">Time this block was cached.  Presumes we are created just before we are added to the cache.</div>
 </li>
 </ul>
@@ -383,7 +383,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BucketCache.BucketEntry</h4>
-<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1159">BucketCache.BucketEntry</a>(long&nbsp;offset,
+<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1163">BucketCache.BucketEntry</a>(long&nbsp;offset,
                        int&nbsp;length,
                        long&nbsp;accessCounter,
                        boolean&nbsp;inMemory)</pre>
@@ -403,7 +403,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>offset</h4>
-<pre>long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1170">offset</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1174">offset</a>()</pre>
 </li>
 </ul>
 <a name="setOffset(long)">
@@ -412,7 +412,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>setOffset</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1176">setOffset</a>(long&nbsp;value)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1180">setOffset</a>(long&nbsp;value)</pre>
 </li>
 </ul>
 <a name="getLength()">
@@ -421,7 +421,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>getLength</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1183">getLength</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1187">getLength</a>()</pre>
 </li>
 </ul>
 <a name="deserializerReference(org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap)">
@@ -430,7 +430,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>deserializerReference</h4>
-<pre>protected&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1187">deserializerReference</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</pre>
+<pre>protected&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1191">deserializerReference</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</pre>
 </li>
 </ul>
 <a name="setDeserialiserReference(org.apache.hadoop.hbase.io.hfile.CacheableDeserializer, org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap)">
@@ -439,7 +439,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>setDeserialiserReference</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1193">setDeserialiserReference</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1197">setDeserialiserReference</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.html" title="interface in org.apache.hadoop.hbase.io.hfile">CacheableDeserializer</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&gt;&nbsp;deserializer,
                             <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap)</pre>
 </li>
 </ul>
@@ -449,7 +449,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>access</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1203">access</a>(long&nbsp;accessCounter)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1207">access</a>(long&nbsp;accessCounter)</pre>
 <div class="block">Block has been accessed. Update its local access counter.</div>
 </li>
 </ul>
@@ -459,7 +459,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockList">
 <li class="blockList">
 <h4>getPriority</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1210">getPriority</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockPriority</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1214">getPriority</a>()</pre>
 </li>
 </ul>
 <a name="getCachedTime()">
@@ -468,7 +468,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializabl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getCachedTime</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1214">getCachedTime</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html#line.1218">getCachedTime</a>()</pre>
 </li>
 </ul>
 </li>
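
The BucketEntry fields diffed above (an int offsetBase plus a single byte offset1, with accessors offset() and setOffset(long)) exist so that a long file offset does not cost a full eight bytes per cached block. The sketch below shows one way such a packing can work, assuming bucket offsets are 256-byte aligned; only the two field names echo the Javadoc above, and the exact bit layout HBase uses is not reproduced here.

// Hedged sketch: pack a 256-byte-aligned offset into an int plus a byte.
// CompactOffset is a made-up name; it is not the BucketEntry implementation.
class CompactOffset {
  private int offsetBase;   // low 32 bits of (offset >> 8)
  private byte offset1;     // bits 32..39 of (offset >> 8)

  void setOffset(long offset) {
    assert (offset & 0xFF) == 0 : "offset must be 256-byte aligned";
    long shifted = offset >>> 8;
    offsetBase = (int) shifted;
    offset1 = (byte) (shifted >>> 32);
  }

  long offset() {
    long shifted = (offsetBase & 0xFFFFFFFFL) | ((offset1 & 0xFFL) << 32);
    return shifted << 8;
  }

  public static void main(String[] args) {
    CompactOffset o = new CompactOffset();
    o.setOffset(5L * 1024 * 1024 * 1024);   // 5 GiB, 256-byte aligned
    System.out.println(o.offset());         // prints 5368709120
  }
}

Storing five bytes of offset state per entry instead of eight, while still addressing a very large 256-byte-aligned cache, is the kind of saving the "Item in cache. We expect this to be where most memory goes" Javadoc is after.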

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
index 397f535..e80c479 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
@@ -103,7 +103,7 @@
 </dl>
 <hr>
 <br>
-<pre>private class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1225">BucketCache.BucketEntryGroup</a>
+<pre>private class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1229">BucketCache.BucketEntryGroup</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntryGroup</a>&gt;</pre>
 <div class="block">Used to group bucket entries into priority buckets. There will be a
@@ -226,7 +226,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 <ul class="blockList">
 <li class="blockList">
 <h4>queue</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">CachedEntryQueue</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1227">queue</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">CachedEntryQueue</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1231">queue</a></pre>
 </li>
 </ul>
 <a name="totalSize">
@@ -235,7 +235,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 <ul class="blockList">
 <li class="blockList">
 <h4>totalSize</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1228">totalSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1232">totalSize</a></pre>
 </li>
 </ul>
 <a name="bucketSize">
@@ -244,7 +244,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>bucketSize</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1229">bucketSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1233">bucketSize</a></pre>
 </li>
 </ul>
 </li>
@@ -261,7 +261,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BucketCache.BucketEntryGroup</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1231">BucketCache.BucketEntryGroup</a>(long&nbsp;bytesToFree,
+<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1235">BucketCache.BucketEntryGroup</a>(long&nbsp;bytesToFree,
                             long&nbsp;blockSize,
                             long&nbsp;bucketSize)</pre>
 </li>
@@ -280,7 +280,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 <ul class="blockList">
 <li class="blockList">
 <h4>add</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1237">add</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;&nbsp;block)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1241">add</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;&nbsp;block)</pre>
 </li>
 </ul>
 <a name="free(long)">
@@ -289,7 +289,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 <ul class="blockList">
 <li class="blockList">
 <h4>free</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1242">free</a>(long&nbsp;toFree)</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1246">free</a>(long&nbsp;toFree)</pre>
 </li>
 </ul>
 <a name="overflow()">
@@ -298,7 +298,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 <ul class="blockList">
 <li class="blockList">
 <h4>overflow</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1258">overflow</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1262">overflow</a>()</pre>
 </li>
 </ul>
 <a name="totalSize()">
@@ -307,7 +307,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 <ul class="blockList">
 <li class="blockList">
 <h4>totalSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1262">totalSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1266">totalSize</a>()</pre>
 </li>
 </ul>
 <a name="compareTo(org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntryGroup)">
@@ -316,7 +316,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 <ul class="blockList">
 <li class="blockList">
 <h4>compareTo</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1267">compareTo</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntryGroup</a>&nbsp;that)</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1271">compareTo</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntryGroup</a>&nbsp;that)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true#compareTo(T)" title="class or interface in java.lang">compareTo</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntryGroup</a>&gt;</code></dd>
@@ -329,7 +329,7 @@ implements <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1274">equals</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;that)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html#line.1278">equals</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;that)</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#equals(java.lang.Object)" title="class or interface in java.lang">equals</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
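
The BucketEntryGroup members diffed above (add, free(toFree), overflow(), totalSize(), compareTo) group eviction candidates into priority buckets and release the coldest entries first once a group exceeds its share. Below is a simplified stand-in assuming an access-counter ordering; HBase's CachedEntryQueue, BlockCacheKey and eviction callbacks are replaced by plain JDK types, and all names other than the method names taken from the Javadoc are invented.

import java.util.Comparator;
import java.util.PriorityQueue;

// Hedged sketch of a priority group: track total size against a quota and
// free least-recently-accessed entries first. Not the real BucketCache code.
class EntryGroupSketch {
  static final class Entry {
    final long sizeBytes;
    final long accessCounter;
    Entry(long sizeBytes, long accessCounter) {
      this.sizeBytes = sizeBytes;
      this.accessCounter = accessCounter;
    }
  }

  private final PriorityQueue<Entry> queue =
      new PriorityQueue<>(Comparator.comparingLong((Entry e) -> e.accessCounter));
  private long totalSize = 0;
  private final long bucketSize;    // this group's share of the cache

  EntryGroupSketch(long bucketSize) {
    this.bucketSize = bucketSize;
  }

  void add(Entry e) {               // counterpart of add(Map.Entry<BlockCacheKey, BucketEntry>)
    queue.add(e);
    totalSize += e.sizeBytes;
  }

  long overflow() {                 // bytes by which this group exceeds its share
    return totalSize - bucketSize;
  }

  long free(long toFree) {          // evict coldest entries until toFree bytes are released
    long freed = 0;
    while (freed < toFree && !queue.isEmpty()) {
      Entry victim = queue.poll();  // smallest access counter first
      freed += victim.sizeBytes;
      totalSize -= victim.sizeBytes;
    }
    return freed;
  }

  long totalSize() {
    return totalSize;
  }
}

The diff also shows compareTo(BucketCache.BucketEntryGroup) is defined, which suggests the groups themselves are ordered (for example by overflow()) when deciding which one to shrink first; that ordering is easy to layer on top of a sketch like this with Comparator.comparingLong(EntryGroupSketch::overflow).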

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
index 713c40c..427e985 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
@@ -99,7 +99,7 @@
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1284">BucketCache.RAMQueueEntry</a>
+<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1288">BucketCache.RAMQueueEntry</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Block Entry stored in the memory with key,data and so on</div>
 </li>
@@ -218,7 +218,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>key</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1285">key</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1289">key</a></pre>
 </li>
 </ul>
 <a name="data">
@@ -227,7 +227,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>data</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1286">data</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1290">data</a></pre>
 </li>
 </ul>
 <a name="accessCounter">
@@ -236,7 +236,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>accessCounter</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1287">accessCounter</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1291">accessCounter</a></pre>
 </li>
 </ul>
 <a name="inMemory">
@@ -245,7 +245,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>inMemory</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1288">inMemory</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1292">inMemory</a></pre>
 </li>
 </ul>
 </li>
@@ -262,7 +262,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BucketCache.RAMQueueEntry</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1290">BucketCache.RAMQueueEntry</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;bck,
+<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1294">BucketCache.RAMQueueEntry</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;bck,
                          <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;data,
                          long&nbsp;accessCounter,
                          boolean&nbsp;inMemory)</pre>
@@ -282,7 +282,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getData</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1298">getData</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1302">getData</a>()</pre>
 </li>
 </ul>
 <a name="getKey()">
@@ -291,7 +291,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getKey</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1302">getKey</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1306">getKey</a>()</pre>
 </li>
 </ul>
 <a name="access(long)">
@@ -300,7 +300,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>access</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1306">access</a>(long&nbsp;accessCounter)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1310">access</a>(long&nbsp;accessCounter)</pre>
 </li>
 </ul>
 <a name="writeToCache(org.apache.hadoop.hbase.io.hfile.bucket.IOEngine, org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator, org.apache.hadoop.hbase.io.hfile.bucket.UniqueIndexMap, java.util.concurrent.atomic.AtomicLong)">
@@ -309,7 +309,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writeToCache</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1310">writeToCache</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html#line.1314">writeToCache</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/IOEngine.html" title="interface in org.apache.hadoop.hbase.io.hfile.bucket">IOEngine</a>&nbsp;ioEngine,
                                    <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;bucketAllocator,
                                    <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/UniqueIndexMap.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">UniqueIndexMap</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;deserialiserMap,
                                    <a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a>&nbsp;realCacheSize)
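
RAMQueueEntry, per the Javadoc above, is the on-heap holder ("Block Entry stored in the memory with key, data and so on") that waits in the write queue until a writer thread persists it via writeToCache. A minimal stand-in follows, with String and byte[] replacing BlockCacheKey and Cacheable; everything except the getter and access(long) names taken from the diff is invented.

// Hedged sketch of a RAM-side queue entry; not the HBase class itself.
final class RamQueueEntrySketch {
  private final String key;            // stands in for BlockCacheKey
  private final byte[] data;           // stands in for Cacheable
  private volatile long accessCounter; // bumped while the block is still queued
  private final boolean inMemory;      // whether the block was flagged in-memory

  RamQueueEntrySketch(String key, byte[] data, long accessCounter, boolean inMemory) {
    this.key = key;
    this.data = data;
    this.accessCounter = accessCounter;
    this.inMemory = inMemory;
  }

  String getKey() { return key; }
  byte[] getData() { return data; }
  boolean isInMemory() { return inMemory; }

  // Block was read again before being written out: refresh its counter.
  void access(long accessCounter) {
    this.accessCounter = accessCounter;
  }
}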

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
index de7fff4..199cf00 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
@@ -108,7 +108,7 @@
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.559">BucketCache.StatisticsThread</a>
+<pre>private static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.563">BucketCache.StatisticsThread</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Thread.html?is-external=true" title="class or interface in java.lang">Thread</a></pre>
 </li>
 </ul>
@@ -225,7 +225,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Thread.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>bucketCache</h4>
-<pre>private final&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.560">bucketCache</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a> <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.564">bucketCache</a></pre>
 </li>
 </ul>
 </li>
@@ -242,7 +242,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Thread.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BucketCache.StatisticsThread</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.562">BucketCache.StatisticsThread</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a>&nbsp;bucketCache)</pre>
+<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.566">BucketCache.StatisticsThread</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a>&nbsp;bucketCache)</pre>
 </li>
 </ul>
 </li>
@@ -259,7 +259,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Thread.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.569">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html#line.573">run</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.html?is-external=true#run()" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
index ef92f5e..df1ac4b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
@@ -108,7 +108,7 @@
 </dl>
 <hr>
 <br>
-<pre> class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.771">BucketCache.WriterThread</a>
+<pre> class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.777">BucketCache.WriterThread</a>
 extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.html" title="class in org.apache.hadoop.hbase.util">HasThread</a></pre>
 </li>
 </ul>
@@ -217,7 +217,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>inputQueue</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.772">inputQueue</a></pre>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt; <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.778">inputQueue</a></pre>
 </li>
 </ul>
 <a name="writerEnabled">
@@ -226,7 +226,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writerEnabled</h4>
-<pre>private volatile&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.773">writerEnabled</a></pre>
+<pre>private volatile&nbsp;boolean <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.779">writerEnabled</a></pre>
 </li>
 </ul>
 </li>
@@ -243,7 +243,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BucketCache.WriterThread</h4>
-<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.775">BucketCache.WriterThread</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;queue)</pre>
+<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.781">BucketCache.WriterThread</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;queue)</pre>
 </li>
 </ul>
 </li>
@@ -260,7 +260,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>disableWriter</h4>
-<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.781">disableWriter</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.787">disableWriter</a>()</pre>
 </li>
 </ul>
 <a name="run()">
@@ -269,7 +269,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.785">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.791">run</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.html?is-external=true#run()" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>
@@ -284,7 +284,7 @@ extends <a href="../../../../../../../org/apache/hadoop/hbase/util/HasThread.htm
 <ul class="blockListLast">
 <li class="blockList">
 <h4>doDrain</h4>
-<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.816">doDrain</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;entries)
+<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html#line.822">doDrain</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;entries)
        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Flush the entries in ramCache to IOEngine and add bucket entry to backingMap.
  Process all that are passed in even if failure being sure to remove from ramCache else we'll
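
The doDrain contract quoted above (flush queued entries to the IOEngine, record them in backingMap, and remove every one from ramCache even on failure) is what keeps the RAM-side map from leaking when a write goes wrong. Below is a hedged sketch of that shape: String keys, a fake persist step and plain JDK collections stand in for the real RAMQueueEntry, IOEngine, BucketAllocator and ramCache types.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;

// Sketch of the drain loop: persist each queued block if possible, but always
// drop it from the RAM-side map so a failed write cannot pin it there forever.
class WriterDrainSketch {
  private final BlockingQueue<String> inputQueue = new ArrayBlockingQueue<>(64);
  private final Map<String, byte[]> ramCache = new ConcurrentHashMap<>();
  private final Map<String, Long> backingMap = new ConcurrentHashMap<>();
  private long nextOffset = 0;

  void doDrain(List<String> keys) {
    for (String key : keys) {
      byte[] data = ramCache.get(key);
      try {
        if (data != null) {
          long offset = persist(data);   // placeholder for the IOEngine write
          backingMap.put(key, offset);
        }
      } catch (RuntimeException writeFailed) {
        // fall through: the entry must still leave ramCache
      } finally {
        ramCache.remove(key);            // never leave entries behind
      }
    }
  }

  private long persist(byte[] data) {
    long offset = nextOffset;
    nextOffset += data.length;           // pretend we appended to a backing file
    return offset;
  }

  void run() throws InterruptedException {
    List<String> batch = new ArrayList<>();
    batch.add(inputQueue.take());        // block for at least one entry
    inputQueue.drainTo(batch);           // then take whatever else is queued
    doDrain(batch);
  }
}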

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
index 6b1aff6..4faf145 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
@@ -104,7 +104,7 @@ public class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/h
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>, <a href="../../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></pre>
 <div class="block">BucketCache uses <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketAllocator</code></a> to allocate/free blocks, and uses
- <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ramCache"><code>ramCache</code></a> and <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#backingMap"><code>backingMap</code></a> in order to
+ BucketCache#ramCache and BucketCache#backingMap in order to
  determine if a given element is in the cache. The bucket cache can use on-heap or
  off-heap memory <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>ByteBufferIOEngine</code></a> or in a file <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>FileIOEngine</code></a> to
  store/read the block data.
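
The class description above says lookups are answered from two structures: ramCache for blocks still waiting to be written, and backingMap for blocks already placed in the IOEngine (on-heap, off-heap or file). A hedged sketch of that read path follows; getBlockSketch, the map types and the readFromEngine placeholder are all invented for illustration and are not the BucketCache API.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Two-level lookup sketch: check the RAM-side write queue first, then the
// backing map that points into the IOEngine.
class LookupSketch {
  private final Map<String, byte[]> ramCache = new ConcurrentHashMap<>();
  private final Map<String, Long> backingMap = new ConcurrentHashMap<>();

  byte[] getBlockSketch(String key) {
    byte[] queued = ramCache.get(key);
    if (queued != null) {
      return queued;                     // still on heap, not yet written out
    }
    Long offset = backingMap.get(key);
    if (offset != null) {
      return readFromEngine(offset);     // placeholder for an IOEngine read
    }
    return null;                         // miss
  }

  private byte[] readFromEngine(long offset) {
    return new byte[0];                  // real code would read offset/length from the engine
  }
}
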
@@ -113,7 +113,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
  <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>LruBlockCache</code></a>
 
  <p>BucketCache can be used as mainly a block cache (see
- <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>CombinedBlockCache</code></a>), combined with 
+ <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>CombinedBlockCache</code></a>), combined with
  LruBlockCache to decrease CMS GC and heap fragmentation.
 
  <p>It also can be used as a secondary cache (e.g. using a file on ssd/fusionio to store
@@ -440,8 +440,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <tr class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#disableCache()">disableCache</a></strong>()</code>
-<div class="block">Used to shut down the cache -or- turn it off in the case of something
- broken.</div>
+<div class="block">Used to shut down the cache -or- turn it off in the case of something broken.</div>
 </td>
 </tr>
 <tr class="altColor">
@@ -1154,7 +1153,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlock</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.399">getBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;key,
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.400">getBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;key,
                  boolean&nbsp;caching,
                  boolean&nbsp;repeat,
                  boolean&nbsp;updateCacheMetrics)</pre>
@@ -1172,7 +1171,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>blockEvicted</h4>
-<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.455">blockEvicted</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
+<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.459">blockEvicted</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
                 <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&nbsp;bucketEntry,
                 boolean&nbsp;decrementBlockNumber)</pre>
 </li>
@@ -1183,7 +1182,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>evictBlock</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.465">evictBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.469">evictBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#evictBlock(org.apache.hadoop.hbase.io.hfile.BlockCacheKey)">BlockCache</a></code></strong></div>
 <div class="block">Evict block from cache.</div>
 <dl>
@@ -1199,7 +1198,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>forceEvict</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.471">forceEvict</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.475">forceEvict</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
 </li>
 </ul>
 <a name="checkRamCache(org.apache.hadoop.hbase.io.hfile.BlockCacheKey)">
@@ -1208,7 +1207,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>checkRamCache</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.500">checkRamCache</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.504">checkRamCache</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
 </li>
 </ul>
 <a name="evictBlock(org.apache.hadoop.hbase.io.hfile.BlockCacheKey, boolean)">
@@ -1217,7 +1216,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>evictBlock</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.509">evictBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.513">evictBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
                  boolean&nbsp;deletedBlock)</pre>
 </li>
 </ul>
@@ -1227,7 +1226,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>logStats</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.574">logStats</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.578">logStats</a>()</pre>
 </li>
 </ul>
 <a name="getRealCacheSize()">
@@ -1236,7 +1235,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getRealCacheSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.600">getRealCacheSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.604">getRealCacheSize</a>()</pre>
 </li>
 </ul>
 <a name="acceptableSize()">
@@ -1245,7 +1244,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>acceptableSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.604">acceptableSize</a>()</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.608">acceptableSize</a>()</pre>
 </li>
 </ul>
 <a name="singleSize()">
@@ -1254,7 +1253,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>singleSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.608">singleSize</a>()</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.612">singleSize</a>()</pre>
 </li>
 </ul>
 <a name="multiSize()">
@@ -1263,7 +1262,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>multiSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.613">multiSize</a>()</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.617">multiSize</a>()</pre>
 </li>
 </ul>
 <a name="memorySize()">
@@ -1272,7 +1271,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>memorySize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.618">memorySize</a>()</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.622">memorySize</a>()</pre>
 </li>
 </ul>
 <a name="freeSpace(java.lang.String)">
@@ -1281,7 +1280,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>freeSpace</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.629">freeSpace</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.633">freeSpace</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
 <div class="block">Free the space if the used size reaches acceptableSize() or one size block
  couldn't be allocated. When freeing the space, we use the LRU algorithm and
  ensure there must be some blocks evicted</div>
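
The LRU-driven freeing described above can be pictured with a small, self-contained sketch. This is only an outline of the general idea (evict least-recently-used entries until enough bytes are reclaimed); the Entry type, its lastAccessNanos field, and the flat map below are hypothetical simplifications, not BucketCache's actual bucket-aware bookkeeping.

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Hypothetical stand-in for a cached block; the real cache tracks far more state.
    final class LruFreeSpaceSketch {
      static final class Entry {
        final String key;
        final long sizeBytes;
        final long lastAccessNanos;
        Entry(String key, long sizeBytes, long lastAccessNanos) {
          this.key = key;
          this.sizeBytes = sizeBytes;
          this.lastAccessNanos = lastAccessNanos;
        }
      }

      final Map<String, Entry> backing = new ConcurrentHashMap<String, Entry>();

      /** Evict least-recently-used entries until at least bytesToFree bytes are reclaimed. */
      long freeSpace(long bytesToFree) {
        List<Entry> byAge = new ArrayList<Entry>(backing.values());
        // Oldest access first, so eviction starts with the least-recently-used entries.
        byAge.sort(Comparator.comparingLong((Entry e) -> e.lastAccessNanos));
        long freed = 0;
        for (Entry e : byAge) {
          if (freed >= bytesToFree) {
            break;                                // enough space reclaimed
          }
          if (backing.remove(e.key) != null) {    // "evict" the block
            freed += e.sizeBytes;
          }
        }
        return freed;
      }
    }
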
@@ -1294,7 +1293,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getRAMQueueEntries</h4>
-<pre>static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.925">getRAMQueueEntries</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;q,
+<pre>static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.931">getRAMQueueEntries</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;q,
                                                  <a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.RAMQueueEntry</a>&gt;&nbsp;receptacle)
                                                    throws <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Blocks until elements available in <code>q</code> then tries to grab as many as possible
@@ -1312,7 +1311,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>persistToFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.936">persistToFile</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.942">persistToFile</a>()
                     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
@@ -1324,7 +1323,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>retrieveFromFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.958">retrieveFromFile</a>(int[]&nbsp;bucketSizes)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.964">retrieveFromFile</a>(int[]&nbsp;bucketSizes)
                        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                               <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocatorException.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocatorException</a>,
                               <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/ClassNotFoundException.html?is-external=true" title="class or interface in java.lang">ClassNotFoundException</a></pre>
@@ -1340,7 +1339,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>checkIOErrorIsTolerated</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1009">checkIOErrorIsTolerated</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1015">checkIOErrorIsTolerated</a>()</pre>
 <div class="block">Check whether we tolerate IO error this time. If the duration of IOEngine
  throwing errors exceeds ioErrorsDurationTimeTolerated, we will disable the
  cache</div>
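
The duration-based tolerance described above can be sketched roughly as follows. The field names, the millisecond clock, and the way the cache is "disabled" here are illustrative assumptions, not the actual BucketCache fields.

    final class IoErrorToleranceSketch {
      private final long toleratedDurationMs;   // analogous to ioErrorsDurationTimeTolerated
      private long ioErrorStartTimeMs = -1;     // -1 means no open error window
      private volatile boolean cacheEnabled = true;

      IoErrorToleranceSketch(long toleratedDurationMs) {
        this.toleratedDurationMs = toleratedDurationMs;
      }

      /** Called whenever the IO engine throws; disables the cache if errors persist too long. */
      void onIoError() {
        long now = System.currentTimeMillis();
        if (ioErrorStartTimeMs < 0) {
          ioErrorStartTimeMs = now;             // first error: open the tolerance window
        } else if (cacheEnabled && (now - ioErrorStartTimeMs) > toleratedDurationMs) {
          cacheEnabled = false;                 // errors persisted too long: give up on the cache
        }
      }

      /** Called after a successful IO to close the error window. */
      void onIoSuccess() {
        ioErrorStartTimeMs = -1;
      }
    }
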
@@ -1352,9 +1351,8 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>disableCache</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1026">disableCache</a>()</pre>
-<div class="block">Used to shut down the cache -or- turn it off in the case of something
- broken.</div>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1031">disableCache</a>()</pre>
+<div class="block">Used to shut down the cache -or- turn it off in the case of something broken.</div>
 </li>
 </ul>
 <a name="join()">
@@ -1363,7 +1361,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>join</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1040">join</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1044">join</a>()
            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code></dd></dl>
@@ -1375,7 +1373,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>shutdown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1046">shutdown</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1050">shutdown</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#shutdown()">BlockCache</a></code></strong></div>
 <div class="block">Shutdown the cache.</div>
 <dl>
@@ -1390,7 +1388,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getStats</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheStats</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1063">getStats</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheStats</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1067">getStats</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#getStats()">BlockCache</a></code></strong></div>
 <div class="block">Get the statistics for this block cache.</div>
 <dl>
@@ -1405,7 +1403,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getAllocator</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1067">getAllocator</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1071">getAllocator</a>()</pre>
 </li>
 </ul>
 <a name="heapSize()">
@@ -1414,7 +1412,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1072">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1076">heapSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize()">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
@@ -1428,7 +1426,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>size</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1077">size</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1081">size</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#size()">BlockCache</a></code></strong></div>
 <div class="block">Returns the total size of the block cache, in bytes.</div>
 <dl>
@@ -1443,7 +1441,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getFreeSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1082">getFreeSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1086">getFreeSize</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#getFreeSize()">BlockCache</a></code></strong></div>
 <div class="block">Returns the free size of the block cache, in bytes.</div>
 <dl>
@@ -1458,7 +1456,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1087">getBlockCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1091">getBlockCount</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#getBlockCount()">BlockCache</a></code></strong></div>
 <div class="block">Returns the number of blocks currently cached in the block cache.</div>
 <dl>
@@ -1473,7 +1471,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getCurrentSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1092">getCurrentSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1096">getCurrentSize</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#getCurrentSize()">BlockCache</a></code></strong></div>
 <div class="block">Returns the occupied size of the block cache, in bytes.</div>
 <dl>
@@ -1488,7 +1486,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>evictBlocksByHfileName</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1104">evictBlocksByHfileName</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hfileName)</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1108">evictBlocksByHfileName</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hfileName)</pre>
 <div class="block">Evicts all blocks for a specific HFile.
  <p>
  This is used for evict-on-close to remove all blocks of a specific HFile.</div>
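
Evict-on-close boils down to scanning the cached keys and evicting every block that belongs to the named file. The sketch below uses a simplified string key of the form "hfileName#offset"; the real cache keys are BlockCacheKey objects and eviction goes through evictBlock, so treat the map and key format as assumptions.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    final class EvictByFileSketch {
      // Toy model: key = "hfileName#offset". The real cache uses BlockCacheKey objects.
      private final Map<String, byte[]> blocks = new ConcurrentHashMap<String, byte[]>();

      /** Remove every cached block belonging to the named file; returns the eviction count. */
      int evictBlocksByHfileName(String hfileName) {
        int evicted = 0;
        for (String key : blocks.keySet()) {
          if (key.startsWith(hfileName + "#") && blocks.remove(key) != null) {
            evicted++;
          }
        }
        return evicted;
      }
    }
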
@@ -1504,7 +1502,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>stopWriterThreads</h4>
-<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1352">stopWriterThreads</a>()
+<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1359">stopWriterThreads</a>()
                  throws <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Only used in test</div>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -1517,7 +1515,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>iterator</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CachedBlock.html" title="interface in org.apache.hadoop.hbase.io.hfile">CachedBlock</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1361">iterator</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CachedBlock.html" title="interface in org.apache.hadoop.hbase.io.hfile">CachedBlock</a>&gt;&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1368">iterator</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Iterable.html?is-external=true#iterator()" title="class or interface in java.lang">iterator</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/CachedBlock.html" title="interface in org.apache.hadoop.hbase.io.hfile">CachedBlock</a>&gt;</code></dd>
@@ -1532,7 +1530,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockCaches</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>[]&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1451">getBlockCaches</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>[]&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1458">getBlockCaches</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#getBlockCaches()">getBlockCaches</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a></code></dd>
@@ -1545,7 +1543,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockList">
 <li class="blockList">
 <h4>returnBlock</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1456">returnBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1463">returnBlock</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey,
                <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.html" title="interface in org.apache.hadoop.hbase.io.hfile">Cacheable</a>&nbsp;block)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html#returnBlock(org.apache.hadoop.hbase.io.hfile.BlockCacheKey,%20org.apache.hadoop.hbase.io.hfile.Cacheable)">BlockCache</a></code></strong></div>
 <div class="block">Called when the scanner using the block decides to return the block once its usage
@@ -1566,7 +1564,7 @@ implements <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockC
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getRefCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1469">getRefCount</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#line.1476">getRefCount</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>&nbsp;cacheKey)</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html
index b477da1..6e55d81 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-summary.html
@@ -116,7 +116,7 @@
 <td class="colFirst"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></td>
 <td class="colLast">
 <div class="block">BucketCache uses <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketAllocator</code></a> to allocate/free blocks, and uses
- <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ramCache"><code>BucketCache.ramCache</code></a> and <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#backingMap"><code>BucketCache.backingMap</code></a> in order to
+ BucketCache#ramCache and BucketCache#backingMap in order to
  determine if a given element is in the cache.</div>
 </td>
 </tr>
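
The two-map membership check mentioned in this class summary (ramCache for blocks still waiting to be written to the IOEngine, backingMap for blocks already written) can be sketched as below. The value types and the method name are simplified placeholders, not the actual BucketCache internals.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    final class TwoLevelMembershipSketch {
      // Blocks queued for write to the IO engine vs. blocks already written to it.
      private final Map<String, byte[]> ramCache = new ConcurrentHashMap<String, byte[]>();
      private final Map<String, Long> backingMap = new ConcurrentHashMap<String, Long>();

      /** An element counts as cached if either map knows about its key. */
      boolean containsBlock(String cacheKey) {
        return ramCache.containsKey(cacheKey) || backingMap.containsKey(cacheKey);
      }
    }
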

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-use.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-use.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-use.html
index eeb4321..8157923 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/package-use.html
@@ -116,7 +116,7 @@
 <tr class="rowColor">
 <td class="colOne"><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/class-use/BucketCache.html#org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a>
 <div class="block">BucketCache uses <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketAllocator</code></a> to allocate/free blocks, and uses
- <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ramCache"><code>BucketCache.ramCache</code></a> and <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#backingMap"><code>BucketCache.backingMap</code></a> in order to
+ BucketCache#ramCache and BucketCache#backingMap in order to
  determine if a given element is in the cache.</div>
 </td>
 </tr>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCache.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCache.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCache.html
index bfd73de..4eda246 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCache.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCache.html
@@ -346,7 +346,7 @@
 <td class="colFirst"><code>class&nbsp;</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a></strong></code>
 <div class="block">BucketCache uses <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket"><code>BucketAllocator</code></a> to allocate/free blocks, and uses
- <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#ramCache"><code>BucketCache.ramCache</code></a> and <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#backingMap"><code>BucketCache.backingMap</code></a> in order to
+ BucketCache#ramCache and BucketCache#backingMap in order to
  determine if a given element is in the cache.</div>
 </td>
 </tr>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
index d8b6ca7..66dbcf3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
@@ -31,12 +31,12 @@
 <span class="sourceLineNo">023</span>import java.nio.ByteBuffer;<a name="line.23"></a>
 <span class="sourceLineNo">024</span><a name="line.24"></a>
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.ByteBufferedCell;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.Cell;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HConstants;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.31"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.Cell;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HConstants;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.31"></a>
 <span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.KeyValue.Type;<a name="line.32"></a>
 <span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.33"></a>
 <span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.34"></a>
@@ -60,1113 +60,1115 @@
 <span class="sourceLineNo">052</span> */<a name="line.52"></a>
 <span class="sourceLineNo">053</span>@InterfaceAudience.Private<a name="line.53"></a>
 <span class="sourceLineNo">054</span>abstract class BufferedDataBlockEncoder implements DataBlockEncoder {<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  private static int INITIAL_KEY_BUFFER_SIZE = 512;<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  @Override<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public ByteBuffer decodeKeyValues(DataInputStream source,<a name="line.59"></a>
-<span class="sourceLineNo">060</span>      HFileBlockDecodingContext blkDecodingCtx) throws IOException {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>      throw new IOException(this.getClass().getName() + " only accepts "<a name="line.62"></a>
-<span class="sourceLineNo">063</span>          + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    }<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>    HFileBlockDefaultDecodingContext decodingCtx =<a name="line.66"></a>
-<span class="sourceLineNo">067</span>        (HFileBlockDefaultDecodingContext) blkDecodingCtx;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    if (decodingCtx.getHFileContext().isIncludesTags()<a name="line.68"></a>
-<span class="sourceLineNo">069</span>        &amp;&amp; decodingCtx.getHFileContext().isCompressTags()) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      if (decodingCtx.getTagCompressionContext() != null) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        // It will be overhead to create the TagCompressionContext again and again for every block<a name="line.71"></a>
-<span class="sourceLineNo">072</span>        // decoding.<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        decodingCtx.getTagCompressionContext().clear();<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      } else {<a name="line.74"></a>
-<span class="sourceLineNo">075</span>        try {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>          TagCompressionContext tagCompressionContext = new TagCompressionContext(<a name="line.76"></a>
-<span class="sourceLineNo">077</span>              LRUDictionary.class, Byte.MAX_VALUE);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>          decodingCtx.setTagCompressionContext(tagCompressionContext);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>        } catch (Exception e) {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>          throw new IOException("Failed to initialize TagCompressionContext", e);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>        }<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      }<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    }<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    return internalDecodeKeyValues(source, 0, 0, decodingCtx);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  /********************* common prefixes *************************/<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  // Having this as static is fine but if META is having DBE then we should<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  // change this.<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  public static int compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    return Bytes.compareTo(left.getRowArray(), left.getRowOffset() + rowCommonPrefix,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>        left.getRowLength() - rowCommonPrefix, right.getRowArray(), right.getRowOffset()<a name="line.92"></a>
-<span class="sourceLineNo">093</span>            + rowCommonPrefix, right.getRowLength() - rowCommonPrefix);<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  public static int compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset() + familyCommonPrefix,<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        left.getFamilyLength() - familyCommonPrefix, right.getFamilyArray(),<a name="line.98"></a>
-<span class="sourceLineNo">099</span>        right.getFamilyOffset() + familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
-<span class="sourceLineNo">101</span><a name="line.101"></a>
-<span class="sourceLineNo">102</span>  public static int compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset() + qualCommonPrefix,<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        left.getQualifierLength() - qualCommonPrefix, right.getQualifierArray(),<a name="line.104"></a>
-<span class="sourceLineNo">105</span>        right.getQualifierOffset() + qualCommonPrefix, right.getQualifierLength()<a name="line.105"></a>
-<span class="sourceLineNo">106</span>            - qualCommonPrefix);<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  }<a name="line.107"></a>
-<span class="sourceLineNo">108</span><a name="line.108"></a>
-<span class="sourceLineNo">109</span>  protected static class SeekerState {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    protected ByteBuff currentBuffer;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    protected TagCompressionContext tagCompressionContext;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    protected int valueOffset = -1;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    protected int keyLength;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    protected int valueLength;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    protected int lastCommonPrefix;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    protected int tagsLength = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    protected int tagsOffset = -1;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    protected int tagsCompressedLength = 0;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    protected boolean uncompressTags = true;<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>    /** We need to store a copy of the key. */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    protected byte[] keyBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    protected byte[] tagsBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>    protected long memstoreTS;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    protected int nextKvOffset;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    protected KeyValue.KeyOnlyKeyValue currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    // many object creations.<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    private final ObjectIntPair&lt;ByteBuffer&gt; tmpPair;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    private final boolean includeTags;<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public SeekerState(ObjectIntPair&lt;ByteBuffer&gt; tmpPair, boolean includeTags) {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      this.tmpPair = tmpPair;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      this.includeTags = includeTags;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>    protected boolean isValid() {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      return valueOffset != -1;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    protected void invalidate() {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      valueOffset = -1;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>      tagsCompressedLength = 0;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      uncompressTags = true;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      currentBuffer = null;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    protected void ensureSpaceForKey() {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      if (keyLength &gt; keyBuffer.length) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        // rare case, but we need to handle arbitrary length of key<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        int newKeyBufferLength = Math.max(keyBuffer.length, 1) * 2;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>        while (keyLength &gt; newKeyBufferLength) {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>          newKeyBufferLength *= 2;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        }<a name="line.156"></a>
-<span class="sourceLineNo">157</span>        byte[] newKeyBuffer = new byte[newKeyBufferLength];<a name="line.157"></a>
-<span class="sourceLineNo">158</span>        System.arraycopy(keyBuffer, 0, newKeyBuffer, 0, keyBuffer.length);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        keyBuffer = newKeyBuffer;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      }<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    }<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>    protected void ensureSpaceForTags() {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      if (tagsLength &gt; tagsBuffer.length) {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        // rare case, but we need to handle arbitrary length of tags<a name="line.165"></a>
-<span class="sourceLineNo">166</span>        int newTagsBufferLength = Math.max(tagsBuffer.length, 1) * 2;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        while (tagsLength &gt; newTagsBufferLength) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>          newTagsBufferLength *= 2;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>        }<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        byte[] newTagsBuffer = new byte[newTagsBufferLength];<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        System.arraycopy(tagsBuffer, 0, newTagsBuffer, 0, tagsBuffer.length);<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        tagsBuffer = newTagsBuffer;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    }<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>    protected void setKey(byte[] keyBuffer, long memTS) {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      currentKey.setKey(keyBuffer, 0, keyLength);<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      memstoreTS = memTS;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    }<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>    /**<a name="line.181"></a>
-<span class="sourceLineNo">182</span>     * Copy the state from the next one into this instance (the previous state<a name="line.182"></a>
-<span class="sourceLineNo">183</span>     * placeholder). Used to save the previous state when we are advancing the<a name="line.183"></a>
-<span class="sourceLineNo">184</span>     * seeker to the next key/value.<a name="line.184"></a>
-<span class="sourceLineNo">185</span>     */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    protected void copyFromNext(SeekerState nextState) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      if (keyBuffer.length != nextState.keyBuffer.length) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>        keyBuffer = nextState.keyBuffer.clone();<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      } else if (!isValid()) {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        // Note: we can only call isValid before we override our state, so this<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        // comes before all the assignments at the end of this method.<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        System.arraycopy(nextState.keyBuffer, 0, keyBuffer, 0,<a name="line.192"></a>
-<span class="sourceLineNo">193</span>             nextState.keyLength);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      } else {<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        // don't copy the common prefix between this key and the previous one<a name="line.195"></a>
-<span class="sourceLineNo">196</span>        System.arraycopy(nextState.keyBuffer, nextState.lastCommonPrefix,<a name="line.196"></a>
-<span class="sourceLineNo">197</span>            keyBuffer, nextState.lastCommonPrefix, nextState.keyLength<a name="line.197"></a>
-<span class="sourceLineNo">198</span>                - nextState.lastCommonPrefix);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      }<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      currentKey = nextState.currentKey;<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>      valueOffset = nextState.valueOffset;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      keyLength = nextState.keyLength;<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      valueLength = nextState.valueLength;<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      lastCommonPrefix = nextState.lastCommonPrefix;<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      nextKvOffset = nextState.nextKvOffset;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      memstoreTS = nextState.memstoreTS;<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      currentBuffer = nextState.currentBuffer;<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      tagsOffset = nextState.tagsOffset;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      tagsLength = nextState.tagsLength;<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (nextState.tagCompressionContext != null) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        tagCompressionContext = nextState.tagCompressionContext;<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    public Cell toCell() {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>      // Buffer backing the value and tags part from the HFileBlock's buffer<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      // When tag compression in use, this will be only the value bytes area.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      ByteBuffer valAndTagsBuffer;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      int vOffset;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      int valAndTagsLength = this.valueLength;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>      int tagsLenSerializationSize = 0;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      if (this.includeTags &amp;&amp; this.tagCompressionContext == null) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        // Include the tags part also. This will be the tags bytes + 2 bytes of for storing tags<a name="line.224"></a>
-<span class="sourceLineNo">225</span>        // length<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        tagsLenSerializationSize = this.tagsOffset - (this.valueOffset + this.valueLength);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        valAndTagsLength += tagsLenSerializationSize + this.tagsLength;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      this.currentBuffer.asSubByteBuffer(this.valueOffset, valAndTagsLength, this.tmpPair);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      valAndTagsBuffer = this.tmpPair.getFirst();<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      vOffset = this.tmpPair.getSecond();// This is the offset to value part in the BB<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      if (valAndTagsBuffer.hasArray()) {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        return toOnheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      } else {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        return toOffheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>    private Cell toOnheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>        int tagsLenSerializationSize) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      byte[] tagsArray = HConstants.EMPTY_BYTE_ARRAY;<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      int tOffset = 0;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      if (this.includeTags) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        if (this.tagCompressionContext == null) {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          tagsArray = valAndTagsBuffer.array();<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          tOffset = valAndTagsBuffer.arrayOffset() + vOffset + this.valueLength<a name="line.246"></a>
-<span class="sourceLineNo">247</span>              + tagsLenSerializationSize;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        } else {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          tagsArray = Bytes.copy(tagsBuffer, 0, this.tagsLength);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          tOffset = 0;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      return new OnheapDecodedCell(Bytes.copy(keyBuffer, 0, this.keyLength),<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.255"></a>
-<span class="sourceLineNo">256</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer.array(),<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          valAndTagsBuffer.arrayOffset() + vOffset, this.valueLength, memstoreTS, tagsArray,<a name="line.257"></a>
-<span class="sourceLineNo">258</span>          tOffset, this.tagsLength);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>    private Cell toOffheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.261"></a>
-<span class="sourceLineNo">262</span>        int tagsLenSerializationSize) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      ByteBuffer tagsBuf =  HConstants.EMPTY_BYTE_BUFFER;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      int tOffset = 0;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      if (this.includeTags) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        if (this.tagCompressionContext == null) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          tagsBuf = valAndTagsBuffer;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>          tOffset = vOffset + this.valueLength + tagsLenSerializationSize;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        } else {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          tagsBuf = ByteBuffer.wrap(Bytes.copy(tagsBuffer, 0, this.tagsLength));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          tOffset = 0;<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return new OffheapDecodedCell(ByteBuffer.wrap(Bytes.copy(keyBuffer, 0, this.keyLength)),<a name="line.274"></a>
-<span class="sourceLineNo">275</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.276"></a>
-<span class="sourceLineNo">277</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer, vOffset,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>          this.valueLength, memstoreTS, tagsBuf, tOffset, this.tagsLength);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    }<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
-<span class="sourceLineNo">283</span>   * Copies only the key part of the keybuffer by doing a deep copy and passes the<a name="line.283"></a>
-<span class="sourceLineNo">284</span>   * seeker state members for taking a clone.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Note that the value byte[] part is still pointing to the currentBuffer and<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * represented by the valueOffset and valueLength<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  // We return this as a Cell to the upper layers of read flow and might try setting a new SeqId<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  // there. So this has to be an instance of SettableSequenceId.<a name="line.289"></a>
-<span class="sourceLineNo">290</span>  protected static class OnheapDecodedCell implements Cell, HeapSize, SettableSequenceId,<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      Streamable {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.293"></a>
-<span class="sourceLineNo">294</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.ARRAY));<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    private byte[] keyOnlyBuffer;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    private short rowLength;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    private int familyOffset;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    private byte familyLength;<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    private int qualifierOffset;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    private int qualifierLength;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    private long timestamp;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    private byte typeByte;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    private byte[] valueBuffer;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    private int valueOffset;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    private int valueLength;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    private byte[] tagsBuffer;<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    private int tagsOffset;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    private int tagsLength;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    private long seqId;<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>    protected OnheapDecodedCell(byte[] keyBuffer, short rowLength, int familyOffset,<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.312"></a>
-<span class="sourceLineNo">313</span>        byte[] valueBuffer, int valueOffset, int valueLen, long seqId, byte[] tagsBuffer,<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        int tagsOffset, int tagsLength) {<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      this.keyOnlyBuffer = keyBuffer;<a name="line.315"></a>
-<span class="sourceLineNo">316</span>      this.rowLength = rowLength;<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      this.familyOffset = familyOffset;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.familyLength = familyLength;<a name="line.318"></a>
-<span class="sourceLineNo">319</span>      this.qualifierOffset = qualOffset;<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.qualifierLength = qualLength;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>      this.timestamp = timeStamp;<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      this.typeByte = typeByte;<a name="line.322"></a>
-<span class="sourceLineNo">323</span>      this.valueBuffer = valueBuffer;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      this.valueOffset = valueOffset;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      this.valueLength = valueLen;<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      this.tagsBuffer = tagsBuffer;<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      this.tagsOffset = tagsOffset;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      this.tagsLength = tagsLength;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      setSequenceId(seqId);<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>    @Override<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    public byte[] getRowArray() {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>      return keyOnlyBuffer;<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    }<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>    @Override<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    public byte[] getFamilyArray() {<a name="line.338"></a>
-<span class="sourceLineNo">339</span>      return keyOnlyBuffer;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    }<a name="line.340"></a>
-<span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>    @Override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    public byte[] getQualifierArray() {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>      return keyOnlyBuffer;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
-<span class="sourceLineNo">346</span><a name="line.346"></a>
-<span class="sourceLineNo">347</span>    @Override<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    public int getRowOffset() {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      return Bytes.SIZEOF_SHORT;<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>    @Override<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    public short getRowLength() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      return rowLength;<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>    @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    public int getFamilyOffset() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      return familyOffset;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>    @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    public byte getFamilyLength() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      return familyLength;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>    @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    public int getQualifierOffset() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      return qualifierOffset;<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    }<a name="line.370"></a>
-<span class="sourceLineNo">371</span><a name="line.371"></a>
-<span class="sourceLineNo">372</span>    @Override<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    public int getQualifierLength() {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      return qualifierLength;<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>    @Override<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    public long getTimestamp() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      return timestamp;<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>    @Override<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    public byte getTypeByte() {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return typeByte;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>    @Override<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    public long getSequenceId() {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      return seqId;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>    @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    public byte[] getValueArray() {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      return this.valueBuffer;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>    @Override<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    public int getValueOffset() {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      return valueOffset;<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>    @Override<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    public int getValueLength() {<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      return valueLength;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    @Override<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    public byte[] getTagsArray() {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      return this.tagsBuffer;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    }<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    @Override<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    public int getTagsOffset() {<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      return this.tagsOffset;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    }<a name="line.415"></a>
-<span class="sourceLineNo">416</span><a name="line.416"></a>
-<span class="sourceLineNo">417</span>    @Override<a name="line.417"></a>
-<span class="sourceLineNo">418</span>    public int getTagsLength() {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      return tagsLength;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    @Override<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public String toString() {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          + getValueLength() + "/seqid=" + seqId;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    @Override<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    public void setSequenceId(long seqId) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      this.seqId = seqId;<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    @Override<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    public long heapSize() {<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>    @Override<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    public int write(OutputStream out) throws IOException {<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      return write(out, true);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    @Override<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.445"></a>
-<span class="sourceLineNo">446</span>          tagsLength, withTags);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      ByteBufferUtils.putInt(out, keyOnlyBuffer.length);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      // Write key<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      out.write(keyOnlyBuffer);<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      // Write value<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      out.write(this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      if (withTags) {<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        // 2 bytes tags length followed by tags bytes<a name="line.455"></a>
-<span class="sourceLineNo">456</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        out.write(this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>  }<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>  protected static class OffheapDecodedCell extends ByteBufferedCell implements HeapSize,<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      SettableSequenceId, Streamable {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.468"></a>
-<span class="sourceLineNo">469</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.BYTE_BUFFER));<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    private ByteBuffer keyBuffer;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    private short rowLength;<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    private int familyOffset;<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    private byte familyLength;<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    private int qualifierOffset;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    private int qualifierLength;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    private long timestamp;<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    private byte typeByte;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    private ByteBuffer valueBuffer;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    private int valueOffset;<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    private int valueLength;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private ByteBuffer tagsBuffer;<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    private int tagsOffset;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    private int tagsLength;<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    private long seqId;<a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>    protected OffheapDecodedCell(ByteBuffer keyBuffer, short rowLength, int familyOffset,<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.488"></a>
-<span class="sourceLineNo">489</span>        ByteBuffer valueBuffer, int valueOffset, int valueLen, long seqId, ByteBuffer tagsBuffer,<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        int tagsOffset, int tagsLength) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      // The keyBuffer is always onheap<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      assert keyBuffer.hasArray();<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      assert keyBuffer.arrayOffset() == 0;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      this.keyBuffer = keyBuffer;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      this.rowLength = rowLength;<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      this.familyOffset = familyOffset;<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      this.familyLength = familyLength;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      this.qualifierOffset = qualOffset;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      this.qualifierLength = qualLength;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      this.timestamp = timeStamp;<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      this.typeByte = typeByte;<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      this.valueBuffer = valueBuffer;<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      this.valueOffset = valueOffset;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      this.valueLength = valueLen;<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      this.tagsBuffer = tagsBuffer;<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      this.tagsOffset = tagsOffset;<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      this.tagsLength = tagsLength;<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      setSequenceId(seqId);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    @Override<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    public byte[] getRowArray() {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return this.keyBuffer.array();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    @Override<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    public int getRowOffset() {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      return getRowPosition();<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>    @Override<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    public short getRowLength() {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      return this.rowLength;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
-<span class="sourceLineNo">525</span><a name="line.525"></a>
-<span class="sourceLineNo">526</span>    @Override<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    public byte[] getFamilyArray() {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return this.keyBuffer.array();<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span><a name="line.530"></a>
-<span class="sourceLineNo">531</span>    @Override<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    public int getFamilyOffset() {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      return getFamilyPosition();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>    @Override<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    public byte getFamilyLength() {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      return this.familyLength;<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    }<a name="line.539"></a>
-<span class="sourceLineNo">540</span><a name="line.540"></a>
-<span class="sourceLineNo">541</span>    @Override<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    public byte[] getQualifierArray() {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      return this.keyBuffer.array();<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    }<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    @Override<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    public int getQualifierOffset() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      return getQualifierPosition();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    @Override<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    public int getQualifierLength() {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      return this.qualifierLength;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    }<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    @Override<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    public long getTimestamp() {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      return this.timestamp;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    @Override<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    public byte getTypeByte() {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      return this.typeByte;<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>    @Override<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    public long getSequenceId() {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      return this.seqId;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    @Override<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    public byte[] getValueArray() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      return CellUtil.cloneValue(this);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>    @Override<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    public int getValueOffset() {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      return 0;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
-<span class="sourceLineNo">580</span><a name="line.580"></a>
-<span class="sourceLineNo">581</span>    @Override<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    public int getValueLength() {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      return this.valueLength;<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>    @Override<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    public byte[] getTagsArray() {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      return CellUtil.cloneTags(this);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    }<a name="line.589"></a>
-<span class="sourceLineNo">590</span><a name="line.590"></a>
-<span class="sourceLineNo">591</span>    @Override<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    public int getTagsOffset() {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      return 0;<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    }<a name="line.594"></a>
-<span class="sourceLineNo">595</span><a name="line.595"></a>
-<span class="sourceLineNo">596</span>    @Override<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    public int getTagsLength() {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      return this.tagsLength;<a name="line.598"></a>
-<span class="sourceLineNo">599</span>    }<a name="line.599"></a>
-<span class="sourceLineNo">600</span><a name="line.600"></a>
-<span class="sourceLineNo">601</span>    @Override<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    public ByteBuffer getRowByteBuffer() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      return this.keyBuffer;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    }<a name="line.604"></a>
-<span class="sourceLineNo">605</span><a name="line.605"></a>
-<span class="sourceLineNo">606</span>    @Override<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    public int getRowPosition() {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      return Bytes.SIZEOF_SHORT;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    @Override<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    public ByteBuffer getFamilyByteBuffer() {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      return this.keyBuffer;<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>    @Override<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    public int getFamilyPosition() {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      return this.familyOffset;<a name="line.618"></a>
-<span class="sourceLineNo">619</span>    }<a name="line.619"></a>
-<span class="sourceLineNo">620</span><a name="line.620"></a>
-<span class="sourceLineNo">621</span>    @Override<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    public ByteBuffer getQualifierByteBuffer() {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      return this.keyBuffer;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    }<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>    @Override<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    public int getQualifierPosition() {<a name="line.627"></a>
-<span class="sourceLineNo">628</span>      return this.qualifierOffset;<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    @Override<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    public ByteBuffer getValueByteBuffer() {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      return this.valueBuffer;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
-<span class="sourceLineNo">635</span><a name="line.635"></a>
-<span class="sourceLineNo">636</span>    @Override<a name="line.636"></a>
-<span class="sourceLineNo">637</span>    public int getValuePosition() {<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this.valueOffset;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    @Override<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    public ByteBuffer getTagsByteBuffer() {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      return this.tagsBuffer;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>    }<a name="line.644"></a>
-<span class="sourceLineNo">645</span><a name="line.645"></a>
-<span class="sourceLineNo">646</span>    @Override<a name="line.646"></a>
-<span class="sourceLineNo">647</span>    public int getTagsPosition() {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      return this.tagsOffset;<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
-<span class="sourceLineNo">650</span><a name="line.650"></a>
-<span class="sourceLineNo">651</span>    @Override<a name="line.651"></a>
-<span class="sourceLineNo">652</span>    public long heapSize() {<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>    @Override<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    public void setSequenceId(long seqId) {<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      this.seqId = seqId;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span><a name="line.660"></a>
-<span class="sourceLineNo">661</span>    @Override<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    public int write(OutputStream out) throws IOException {<a name="line.662"></a>
-<span class="sourceLineNo">663</span>      return write(out, true);<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    }<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    @Override<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          tagsLength, withTags);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      ByteBufferUtils.putInt(out, keyBuffer.capacity());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      // Write key<a name="line.673"></a>
-<span class="sourceLineNo">674</span>      out.write(keyBuffer.array());<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      // Write value<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      ByteBufferUtils.copyBufferToStream(out, this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      if (withTags) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>        // 2 bytes tags length followed by tags bytes<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.681"></a>
-<span class="sourceLineNo">682</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.682"></a>
-<span class="sourceLineNo">683</span>        ByteBufferUtils.copyBufferToStream(out, this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  protected abstract static class<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      BufferedEncodedSeeker&lt;STATE extends SeekerState&gt;<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      implements EncodedSeeker {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    protected HFileBlockDecodingContext decodingCtx;<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    protected final CellComparator comparator;<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    protected ByteBuff currentBuffer;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    protected TagCompressionContext tagCompressionContext = null;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    protected  KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    // many object creations.<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    protected final ObjectIntPair&lt;ByteBuffer&gt; tmpPair = new ObjectIntPair&lt;ByteBuffer&gt;();<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    protected STATE current, previous;<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>    public BufferedEncodedSeeker(CellComparator comparator,<a name="line.702"></a>
-<span class="sourceLineNo">703</span>        HFileBlockDecodingContext decodingCtx) {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      this.comparator = comparator;<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.decodingCtx = decodingCtx;<a name="line.705"></a>
-<span class="sourceLineNo">706</span>      if (decodingCtx.getHFileContext().isCompressTags()) {<a name="line.706"></a>
-<span class="sourceLineNo">707</span>        try {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>          tagCompressionContext = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>        } catch (Exception e) {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>          throw new RuntimeException("Failed to initialize TagCompressionContext", e);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>        }<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      }<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      current = createSeekerState(); // always valid<a name="line.713"></a>
-<span class="sourceLineNo">714</span>      previous = createSeekerState(); // may not be valid<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    }<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>    protected boolean includesMvcc() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>      return this.decodingCtx.getHFileContext().isIncludesMvcc();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    }<a name="line.719"></a>
-<span class="sourceLineNo">720</span><a name="line.720"></a>
-<span class="sourceLineNo">721</span>    protected boolean includesTags() {<a name="line.721"></a>
-<span class="sourceLineNo">722</span>      return this.decodingCtx.getHFileContext().isIncludesTags();<a name="line.722"></a>
-<span class="sourceLineNo">723</span>    }<a name="line.723"></a>
-<span class="sourceLineNo">724</span><a name="line.724"></a>
-<span class="sourceLineNo">725</span>    @Override<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    public int compareKey(CellComparator comparator, Cell key) {<a name="line.726"></a>
-<span class="sourceLineNo">727</span>      keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return comparator.compareKeyIgnoresMvcc(key, keyOnlyKV);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    @Override<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    public void setCurrentBuffer(ByteBuff buffer) {<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      if (this.tagCompressionContext != null) {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        this.tagCompressionContext.clear();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      }<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      currentBuffer = buffer;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>      current.currentBuffer = currentBuffer;<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      if(tagCompressionContext != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>        current.tagCompressionContext = tagCompressionContext;<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      }<a name="line.740"></a>
-<span class="sourceLineNo">741</span>      decodeFirst();<a name="line.741"></a>
-<span class="sourceLineNo">742</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      previous.invalidate();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>    @Override<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    public Cell getKey() {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      byte[] key = new byte[current.keyLength];<a name="line.748"></a>
-<span class="sourceLineNo">749</span>      System.arraycopy(current.keyBuffer, 0, key, 0, current.keyLength);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return new KeyValue.KeyOnlyKeyValue(key);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public ByteBuffer getValueShallowCopy() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      currentBuffer.asSubByteBuffer(current.valueOffset, current.valueLength, tmpPair);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>      ByteBuffer dup = tmpPair.getFirst().duplicate();<a name="line.756"></a>
-<span class="sourceLineNo">757</span>      dup.position(tmpPair.getSecond());<a name="line.757"></a>
-<span class="sourceLineNo">758</span>      dup.limit(tmpPair.getSecond() + current.valueLength);<a name="line.758"></a>
-<span class="sourceLineNo">759</span>      return dup.slice();<a name="line.759"></a>
-<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
-<span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    @Override<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    public Cell getCell() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      return current.toCell();<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    }<a name="line.765"></a>
-<span class="sourceLineNo">766</span><a name="line.766"></a>
-<span class="sourceLineNo">767</span>    @Override<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    public void rewind() {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      currentBuffer.rewind();<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      if (tagCompressionContext != null) {<a name="line.770"></a>
-<span class="sourceLineNo">771</span>        tagCompressionContext.clear();<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      }<a name="line.772"></a>
-<span class="sourceLineNo">773</span>      decodeFirst();<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      previous.invalidate();<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public boolean next() {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      if (!currentBuffer.hasRemaining()) {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        return false;<a name="line.781"></a>
-<span class="sourceLineNo">782</span>      }<a name="line.782"></a>
-<span class="sourceLineNo">783</span>      decodeNext();<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      previous.invalidate();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>      return true;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span><a name="line.788"></a>
-<span class="sourceLineNo">789</span>    protected void decodeTags() {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      current.tagsLength = ByteBuff.readCompressedInt(currentBuffer);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      if (tagCompressionContext != null) {<a name="line.791"></a>
-<span class="sourceLineNo">792</span>        if (current.uncompressTags) {<a name="line.792"></a>
-<span class="sourceLineNo">793</span>          // Tag compression is been used. uncompress it into tagsBuffer<a name="line.793"></a>
-<span class="sourceLineNo">794</span>          current.ensureSpaceForTags();<a name="line.794"></a>
-<span class="sourceLineNo">795</span>          try {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>            current.tagsCompressedLength = tagCompressionContext.uncompressTags(currentBuffer,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>                current.tagsBuffer, 0, current.tagsLength);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>          } catch (IOException e) {<a name="line.798"></a>
-<span class="sourceLineNo">799</span>            throw new RuntimeException("Exception while uncompressing tags", e);<a name="line.799"></a>
-<span class="sourceLineNo">800</span>          }<a name="line.800"></a>
-<span class="sourceLineNo">801</span>        } else {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>          currentBuffer.skip(current.tagsCompressedLength);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>          current.uncompressTags = true;// Reset this.<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        }<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        current.tagsOffset = -1;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>      } else {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>        // When tag compress is not used, let us not do copying of tags bytes into tagsBuffer.<a name="line.807"></a>
-<span class="sourceLineNo">808</span>        // Just mark the tags Offset so as to create the KV buffer later in getKeyValueBuffer()<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        current.tagsOffset = currentBuffer.position();<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        currentBuffer.skip(current.tagsLength);<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      }<a name="line.811"></a>
-<span class="sourceLineNo">812</span>    }<a name="line.812"></a>
-<span class="sourceLineNo">813</span><a name="line.813"></a>
-<span class="sourceLineNo">814</span>    @Override<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    public int seekToKeyInBlock(Cell seekCell, boolean seekBefore) {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      int rowCommonPrefix = 0;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      int familyCommonPrefix = 0;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      int qualCommonPrefix = 0;<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      previous.invalidate();<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      do {<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        int comp;<a name="line.821"></a>
-<span class="sourceLineNo">822</span>        keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.822"></a>
-<span class="sourceLineNo">823</span>        if (current.lastCommonPrefix != 0) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>          // The KV format has row key length also in the byte array. The<a name="line.824"></a>
-<span class="sourceLineNo">825</span>          // common prefix<a name="line.825"></a>
-<span class="sourceLineNo">826</span>          // includes it. So we need to subtract to find out the common prefix<a name="line.826"></a>
-<span class="sourceLineNo">827</span>          // in the<a name="line.827"></a>
-<span class="sourceLineNo">828</span>          // row part alone<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          rowCommonPrefix = Math.min(rowCommonPrefix, current.lastCommonPrefix - 2);<a name="line.829"></a>
-<span class="sourceLineNo">830</span>        }<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        if (current.lastCommonPrefix &lt;= 2) {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>          rowCommonPrefix = 0;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>        }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>        rowCommonPrefix += findCommonPrefixInRowPart(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>        comp = compareCommonRowPrefix(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>        if (comp == 0) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          comp = compareTypeBytes(seekCell, keyOnlyKV);<a name="line.837"></a>
-<span class="sourceLineNo">838</span>          if (comp == 0) {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>            // Subtract the fixed row key length and the family key fixed length<a name="line.839"></a>
-<span class="sourceLineNo">840</span>            familyCommonPrefix = Math.max(<a name="line.840"></a>
-<span class="sourceLineNo">841</span>                0,<a name="line.841"></a>
-<span class="sourceLineNo">842</span>                Math.min(familyCommonPrefix,<a name="line.842"></a>
-<span class="sourceLineNo">843</span>                    current.lastCommonPrefix - (3 + keyOnlyKV.getRowLength())));<a name="line.843"></a>
-<span class="sourceLineNo">844</span>            familyCommonPrefix += findCommonPrefixInFamilyPart(seekCell, keyOnlyKV,<a name="line.844"></a>
-<span class="sourceLineNo">845</span>                familyCommonPrefix);<a name="line.845"></a>
-<span class="sourceLineNo">846</span>            comp = compareCommonFamilyPrefix(seekCell, keyOnlyKV, familyCommonPrefix);<a name="line.846"></a>
-<span class="sourceLineNo">847</span>            if (comp == 0) {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>              // subtract the rowkey fixed length and the family key fixed<a name="line.848"></a>
-<span class="sourceLineNo">849</span>              // length<a name="line.849"></a>
-<span class="sourceLineNo">850</span>              qualCommonPrefix = Math.max(<a name="line.850"></a>
-<span class="sourceLineNo">851</span>                  0,<a name="line.851"></a>
-<span class="sourceLineNo">852</span>                  Math.min(<a name="line.852"></a>
-<span class="sourceLineNo">853</span>                      qualCommonPrefix,<a name="line.853"></a>
-<span class="sourceLineNo">854</span>                      current.lastCommonPrefix<a name="line.854"></a>
-<span class="sourceLineNo">855</span>                          - (3 + keyOnlyKV.getRowLength() + keyOnlyKV.getFamilyLength())));<a name="line.855"></a>
-<span class="sourceLineNo">856</span>              qualCommonPrefix += findCommonPrefixInQualifierPart(seekCell, keyOnlyKV,<a name="line.856"></a>
-<span class="sourceLineNo">857</span>                  qualCommonPrefix);<a name="line.857"></a>
-<span class="sourceLineNo">858</span>              comp = compareCommonQualifierPrefix(seekCell, keyOnlyKV, qualCommonPrefix);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>              if (comp == 0) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>                comp = CellComparator.compareTimestamps(seekCell, keyOnlyKV);<a name="line.860"></a>
-<span class="sourceLineNo">861</span>                if (comp == 0) {<a name="line.861"></a>
-<span class="sourceLineNo">862</span>                  // Compare types. Let the delete types sort ahead of puts;<a name="line.862"></a>
-<span class="sourceLineNo">863</span>                  // i.e. types<a name="line.863"></a>
-<span class="sourceLineNo">864</span>                  // of higher numbers sort before those of lesser numbers.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>                  // Maximum<a name="line.865"></a>
-<span class="sourceLineNo">866</span>                  // (255)<a name="line.866"></a>
-<span class="sourceLineNo">867</span>                  // appears ahead of everything, and minimum (0) appears<a name="line.867"></a>
-<span class="sourceLineNo">868</span>                  // after<a name="line.868"></a>
-<span class="sourceLineNo">869</span>                  // everything.<a name="line.869"></a>
-<span class="sourceLineNo">870</span>                  comp = (0xff &amp; keyOnlyKV.getTypeByte()) - (0xff &amp; seekCell.getTypeByte());<a name="line.870"></a>
-<span class="sourceLineNo">871</span>                }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>              }<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            }<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          }<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>        if (comp == 0) { // exact match<a name="line.876"></a>
-<span class="sourceLineNo">877</span>          if (seekBefore) {<a name="line.877"></a>
-<span class="sourceLineNo">878</span>            if (!previous.isValid()) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>              // The caller (seekBefore) has to ensure that we are not at the<a name="line.879"></a>
-<span class="sourceLineNo">880</span>              // first key in the block.<a name="line.880"></a>
-<span class="sourceLineNo">881</span>              throw new IllegalStateException("Cannot seekBefore if "<a name="line.881"></a>
-<span class="sourceLineNo">882</span>                  + "positioned at the first key in the block: key="<a name="line.882"></a>
-<span class="sourceLineNo">883</span>                  + Bytes.toStringBinary(seekCell.getRowArray()));<a name="line.883"></a>
-<span class="sourceLineNo">884</span>            }<a name="line.884"></a>
-<span class="sourceLineNo">885</span>            moveToPrevious();<a name="line.885"></a>
-<span class="sourceLineNo">886</span>            return 1;<a name="line.886"></a>
-<span class="sourceLineNo">887</span>          }<a name="line.887"></a>
-<span class="sourceLineNo">888</span>          return 0;<a name="line.888"></a>
-<span class="sourceLineNo">889</span>        }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>        if (comp &lt; 0) { // already too large, check previous<a name="line.891"></a>
-<span class="sourceLineNo">892</span>          if (previous.isValid()) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>            moveToPrevious();<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          } else {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>            return HConstants.INDEX_KEY_MAGIC; // using optimized index key<a name="line.895"></a>
-<span class="sourceLineNo">896</span>          }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>          return 1;<a name="line.897"></a>
-<span class="sourceLineNo">898</span>        }<a name="line.898"></a>
-<span class="sourceLineNo">899</span><a name="line.899"></a>
-<span class="sourceLineNo">900</span>        // move to next, if more data is available<a name="line.900"></a>
-<span class="sourceLineNo">901</span>        if (currentBuffer.hasRemaining()) {<a name="line.901"></a>
-<span class="sourceLineNo">902</span>          previous.copyFromNext(current);<a name="line.902"></a>
-<span class="sourceLineNo">903</span>          decodeNext();<a name="line.903"></a>
-<span class="sourceLineNo">904</span>          current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.904"></a>
-<span class="sourceLineNo">905</span>        } else {<a name="line.905"></a>
-<span class="sourceLineNo">906</span>          break;<a name="line.906"></a>
-<span class="sourceLineNo">907</span>        }<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      } while (true);<a name="line.908"></a>
-<span class="sourceLineNo">909</span><a name="line.909"></a>
-<span class="sourceLineNo">910</span>      // we hit the end of the block, not an exact match<a name="line.910"></a>
-<span class="sourceLineNo">911</span>      return 1;<a name="line.911"></a>
-<span class="sourceLineNo">912</span>    }<a name="line.912"></a>
-<span class="sourceLineNo">913</span><a name="line.913"></a>
-<span class="sourceLineNo">914</span>    private int compareTypeBytes(Cell key, Cell right) {<a name="line.914"></a>
-<span class="sourceLineNo">915</span>      if (key.getFamilyLength() + key.getQualifierLength() == 0<a name="line.915"></a>
-<span class="sourceLineNo">916</span>          &amp;&amp; key.getTypeByte() == Type.Minimum.getCode()) {<a name="line.916"></a>
-<span class="sourceLineNo">917</span>        // left is "bigger", i.e. it appears later in the sorted order<a name="line.917"></a>
-<span class="sourceLineNo">918</span>        return 1;<a name="line.918"></a>
-<span class="sourceLineNo">919</span>      }<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      if (right.getFamilyLength() + right.getQualifierLength() == 0<a name="line.920"></a>
-<span class="sourceLineNo">921</span>          &amp;&amp; right.getTypeByte() == Type.Minimum.getCode()) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>        return -1;<a name="line.922"></a>
-<span class="sourceLineNo">923</span>      }<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      return 0;<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span><a name="line.926"></a>
-<span class="sourceLineNo">927</span>    private static int findCommonPrefixInRowPart(Cell left, Cell right, int rowCommonPrefix) {<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      return Bytes.findCommonPrefix(left.getRowArray(), right.getRowArray(), left.getRowLength()<a name="line.928"></a>
-<span class="sourceLineNo">929</span>          - rowCommonPrefix, right.getRowLength() - rowCommonPrefix, left.getRowOffset()<a name="line.929"></a>
-<span class="sourceLineNo">930</span>          + rowCommonPrefix, right.getRowOffset() + rowCommonPrefix);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    }<a name="line.931"></a>
-<span class="sourceLineNo">932</span><a name="line.932"></a>
-<span class="sourceLineNo">933</span>    private static int findCommonPrefixInFamilyPart(Cell left, Cell right, int familyCommonPrefix) {<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      return Bytes<a name="line.934"></a>
-<span class="sourceLineNo">935</span>          .findCommonPrefix(left.getFamilyArray(), right.getFamilyArray(), left.getFamilyLength()<a name="line.935"></a>
-<span class="sourceLineNo">936</span>              - familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix,<a name="line.936"></a>
-<span class="sourceLineNo">937</span>              left.getFamilyOffset() + familyCommonPrefix, right.getFamilyOffset()<a name="line.937"></a>
-<span class="sourceLineNo">938</span>                  + familyCommonPrefix);<a name="line.938"></a>
-<span class="sourceLineNo">939</span>    }<a name="line.939"></a>
-<span class="sourceLineNo">940</span><a name="line.940"></a>
-<span class="sourceLineNo">941</span>    private static int findCommonPrefixInQualifierPart(Cell left, Cell right,<a name="line.941"></a>
-<span class="sourceLineNo">942</span>        int qualifierCommonPrefix) {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      return Bytes.findCommonPrefix(left.getQualifierArray(), right.getQualifierArray(),<a name="line.943"></a>
-<span class="sourceLineNo">944</span>          left.getQualifierLength() - qualifierCommonPrefix, right.getQualifierLength()<a name="line.944"></a>
-<span class="sourceLineNo">945</span>              - qualifierCommonPrefix, left.getQualifierOffset() + qualifierCommonPrefix,<a name="line.945"></a>
-<span class="sourceLineNo">946</span>          right.getQualifierOffset() + qualifierCommonPrefix);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    private void moveToPrevious() {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!previous.isValid()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        throw new IllegalStateException(<a name="line.951"></a>
-<span class="sourceLineNo">952</span>            "Can move back only once and not in first key in the block.");<a name="line.952"></a>
-<span class="sourceLineNo">953</span>      }<a name="line.953"></a>
-<span class="sourceLineNo">954</span><a name="line.954"></a>
-<span class="sourceLineNo">955</span>      STATE tmp = previous;<a name="line.955"></a>
-<span class="sourceLineNo">956</span>      previous = current;<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      current = tmp;<a name="line.957"></a>
-<span class="sourceLineNo">958</span><a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // move after last key value<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      currentBuffer.position(current.nextKvOffset);<a name="line.960"></a>
-<span class="sourceLi

<TRUNCATED>

[27/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index 86e9213..1be466e 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -140,9 +140,9 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="strong">AccessController.OpType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="strong">Permission.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="strong">AccessControlFilter.Strategy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="strong">Permission.Action</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="strong">AccessController.OpType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index 6aea795..4d2432f 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -152,9 +152,9 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="strong">SaslUtil.QualityOfProtection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="strong">AuthMethod</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="strong">SaslStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="strong">AuthMethod</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="strong">SaslUtil.QualityOfProtection</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/security/token/TokenUtil.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/token/TokenUtil.html b/devapidocs/org/apache/hadoop/hbase/security/token/TokenUtil.html
index 75f3555..9e9c684 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/token/TokenUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/token/TokenUtil.html
@@ -97,7 +97,7 @@
 <br>
 <pre><a href="../../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Public.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Public</a>
 <a href="../../../../../../org/apache/hadoop/hbase/classification/InterfaceStability.Evolving.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceStability.Evolving</a>
-public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.55">TokenUtil</a>
+public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.52">TokenUtil</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Utility methods for obtaining authentication tokens.</div>
 </li>
@@ -258,7 +258,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.57">LOG</a></pre>
+<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.54">LOG</a></pre>
 </li>
 </ul>
 </li>
@@ -275,7 +275,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TokenUtil</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.55">TokenUtil</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.52">TokenUtil</a>()</pre>
 </li>
 </ul>
 </li>
@@ -292,7 +292,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>obtainToken</h4>
-<pre>public static&nbsp;org.apache.hadoop.security.token.Token&lt;<a href="../../../../../../org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.html" title="class in org.apache.hadoop.hbase.security.token">AuthenticationTokenIdentifier</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.64">obtainToken</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn)
+<pre>public static&nbsp;org.apache.hadoop.security.token.Token&lt;<a href="../../../../../../org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.html" title="class in org.apache.hadoop.hbase.security.token">AuthenticationTokenIdentifier</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.61">obtainToken</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn)
                                                                                          throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Obtain and return an authentication token for the current user.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>conn</code> - The HBase cluster connection</dd>
@@ -307,7 +307,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>obtainToken</h4>
-<pre>public static&nbsp;org.apache.hadoop.security.token.Token&lt;<a href="../../../../../../org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.html" title="class in org.apache.hadoop.hbase.security.token">AuthenticationTokenIdentifier</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.92">obtainToken</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
+<pre>public static&nbsp;org.apache.hadoop.security.token.Token&lt;<a href="../../../../../../org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.html" title="class in org.apache.hadoop.hbase.security.token">AuthenticationTokenIdentifier</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.89">obtainToken</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
                                                                                 <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                                                                                          throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                                                                                 <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -325,7 +325,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterId</h4>
-<pre>private static&nbsp;org.apache.hadoop.io.Text&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.103">getClusterId</a>(org.apache.hadoop.security.token.Token&lt;<a href="../../../../../../org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.html" title="class in org.apache.hadoop.hbase.security.token">AuthenticationTokenIdentifier</a>&gt;&nbsp;token)
+<pre>private static&nbsp;org.apache.hadoop.io.Text&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.100">getClusterId</a>(org.apache.hadoop.security.token.Token&lt;<a href="../../../../../../org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.html" title="class in org.apache.hadoop.hbase.security.token">AuthenticationTokenIdentifier</a>&gt;&nbsp;token)
                                                throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
@@ -337,7 +337,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>obtainAndCacheToken</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.117">obtainAndCacheToken</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.114">obtainAndCacheToken</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
                        <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                                 throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                        <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -355,7 +355,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>obtainTokenForJob</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.152">obtainTokenForJob</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.149">obtainTokenForJob</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
                      <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user,
                      org.apache.hadoop.mapreduce.Job&nbsp;job)
                               throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
@@ -374,7 +374,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>obtainTokenForJob</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.188">obtainTokenForJob</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.185">obtainTokenForJob</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
                      org.apache.hadoop.mapred.JobConf&nbsp;job,
                      <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                               throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
@@ -393,7 +393,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>addTokenForJob</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.224">addTokenForJob</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.221">addTokenForJob</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
                   org.apache.hadoop.mapred.JobConf&nbsp;job,
                   <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
@@ -412,7 +412,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>addTokenForJob</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.244">addTokenForJob</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.241">addTokenForJob</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
                   <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user,
                   org.apache.hadoop.mapreduce.Job&nbsp;job)
                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
@@ -431,7 +431,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>addTokenIfMissing</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.263">addTokenIfMissing</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.260">addTokenIfMissing</a>(<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
                         <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                                  throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                         <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -450,7 +450,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getAuthToken</h4>
-<pre>private static&nbsp;org.apache.hadoop.security.token.Token&lt;<a href="../../../../../../org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.html" title="class in org.apache.hadoop.hbase.security.token">AuthenticationTokenIdentifier</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.278">getAuthToken</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;org.apache.hadoop.security.token.Token&lt;<a href="../../../../../../org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.html" title="class in org.apache.hadoop.hbase.security.token">AuthenticationTokenIdentifier</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TokenUtil.html#line.275">getAuthToken</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                                                  <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                                                                                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                                                                                   <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index 3936917..92b652b 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -159,9 +159,9 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="strong">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="strong">ThriftMetrics.ThriftServerType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="strong">ThriftServerRunner.ImplType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="strong">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index e8b3fdf..1b759e6 100644
--- a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
@@ -379,22 +379,22 @@ extends org.jamon.AbstractTemplateProxy.ImplData</pre>
 <pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.110">m_master</a></pre>
 </li>
 </ul>
-<a name="m_catalogJanitorEnabled">
+<a name="m_filter">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_catalogJanitorEnabled</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.122">m_catalogJanitorEnabled</a></pre>
+<h4>m_filter</h4>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.122">m_filter</a></pre>
 </li>
 </ul>
-<a name="m_catalogJanitorEnabled__IsNotDefault">
+<a name="m_filter__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_catalogJanitorEnabled__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.127">m_catalogJanitorEnabled__IsNotDefault</a></pre>
+<h4>m_filter__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.127">m_filter__IsNotDefault</a></pre>
 </li>
 </ul>
 <a name="m_format">
@@ -415,130 +415,130 @@ extends org.jamon.AbstractTemplateProxy.ImplData</pre>
 <pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.144">m_format__IsNotDefault</a></pre>
 </li>
 </ul>
-<a name="m_metaLocation">
+<a name="m_assignmentManager">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_metaLocation</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.156">m_metaLocation</a></pre>
+<h4>m_assignmentManager</h4>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.156">m_assignmentManager</a></pre>
 </li>
 </ul>
-<a name="m_metaLocation__IsNotDefault">
+<a name="m_assignmentManager__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_metaLocation__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.161">m_metaLocation__IsNotDefault</a></pre>
+<h4>m_assignmentManager__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.161">m_assignmentManager__IsNotDefault</a></pre>
 </li>
 </ul>
-<a name="m_servers">
+<a name="m_metaLocation">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_servers</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.173">m_servers</a></pre>
+<h4>m_metaLocation</h4>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.173">m_metaLocation</a></pre>
 </li>
 </ul>
-<a name="m_servers__IsNotDefault">
+<a name="m_metaLocation__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_servers__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.178">m_servers__IsNotDefault</a></pre>
+<h4>m_metaLocation__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.178">m_metaLocation__IsNotDefault</a></pre>
 </li>
 </ul>
-<a name="m_deadServers">
+<a name="m_frags">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_deadServers</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.190">m_deadServers</a></pre>
+<h4>m_frags</h4>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.190">m_frags</a></pre>
 </li>
 </ul>
-<a name="m_deadServers__IsNotDefault">
+<a name="m_frags__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_deadServers__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.195">m_deadServers__IsNotDefault</a></pre>
+<h4>m_frags__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.195">m_frags__IsNotDefault</a></pre>
 </li>
 </ul>
-<a name="m_filter">
+<a name="m_serverManager">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_filter</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.207">m_filter</a></pre>
+<h4>m_serverManager</h4>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.207">m_serverManager</a></pre>
 </li>
 </ul>
-<a name="m_filter__IsNotDefault">
+<a name="m_serverManager__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_filter__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.212">m_filter__IsNotDefault</a></pre>
+<h4>m_serverManager__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.212">m_serverManager__IsNotDefault</a></pre>
 </li>
 </ul>
-<a name="m_assignmentManager">
+<a name="m_deadServers">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_assignmentManager</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.224">m_assignmentManager</a></pre>
+<h4>m_deadServers</h4>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.224">m_deadServers</a></pre>
 </li>
 </ul>
-<a name="m_assignmentManager__IsNotDefault">
+<a name="m_deadServers__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_assignmentManager__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.229">m_assignmentManager__IsNotDefault</a></pre>
+<h4>m_deadServers__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.229">m_deadServers__IsNotDefault</a></pre>
 </li>
 </ul>
-<a name="m_serverManager">
+<a name="m_catalogJanitorEnabled">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_serverManager</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.241">m_serverManager</a></pre>
+<h4>m_catalogJanitorEnabled</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.241">m_catalogJanitorEnabled</a></pre>
 </li>
 </ul>
-<a name="m_serverManager__IsNotDefault">
+<a name="m_catalogJanitorEnabled__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_serverManager__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.246">m_serverManager__IsNotDefault</a></pre>
+<h4>m_catalogJanitorEnabled__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.246">m_catalogJanitorEnabled__IsNotDefault</a></pre>
 </li>
 </ul>
-<a name="m_frags">
+<a name="m_servers">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>m_frags</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.258">m_frags</a></pre>
+<h4>m_servers</h4>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.258">m_servers</a></pre>
 </li>
 </ul>
-<a name="m_frags__IsNotDefault">
+<a name="m_servers__IsNotDefault">
 <!--   -->
 </a>
 <ul class="blockListLast">
 <li class="blockList">
-<h4>m_frags__IsNotDefault</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.263">m_frags__IsNotDefault</a></pre>
+<h4>m_servers__IsNotDefault</h4>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.263">m_servers__IsNotDefault</a></pre>
 </li>
 </ul>
 </li>
@@ -584,31 +584,31 @@ extends org.jamon.AbstractTemplateProxy.ImplData</pre>
 <pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.106">getMaster</a>()</pre>
 </li>
 </ul>
-<a name="setCatalogJanitorEnabled(boolean)">
+<a name="setFilter(java.lang.String)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setCatalogJanitorEnabled</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.112">setCatalogJanitorEnabled</a>(boolean&nbsp;catalogJanitorEnabled)</pre>
+<h4>setFilter</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.112">setFilter</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;filter)</pre>
 </li>
 </ul>
-<a name="getCatalogJanitorEnabled()">
+<a name="getFilter()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getCatalogJanitorEnabled</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.118">getCatalogJanitorEnabled</a>()</pre>
+<h4>getFilter</h4>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.118">getFilter</a>()</pre>
 </li>
 </ul>
-<a name="getCatalogJanitorEnabled__IsNotDefault()">
+<a name="getFilter__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getCatalogJanitorEnabled__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.123">getCatalogJanitorEnabled__IsNotDefault</a>()</pre>
+<h4>getFilter__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.123">getFilter__IsNotDefault</a>()</pre>
 </li>
 </ul>
 <a name="setFormat(java.lang.String)">
@@ -638,193 +638,193 @@ extends org.jamon.AbstractTemplateProxy.ImplData</pre>
 <pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.140">getFormat__IsNotDefault</a>()</pre>
 </li>
 </ul>
-<a name="setMetaLocation(org.apache.hadoop.hbase.ServerName)">
+<a name="setAssignmentManager(org.apache.hadoop.hbase.master.AssignmentManager)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setMetaLocation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.146">setMetaLocation</a>(<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;metaLocation)</pre>
+<h4>setAssignmentManager</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.146">setAssignmentManager</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a>&nbsp;assignmentManager)</pre>
 </li>
 </ul>
-<a name="getMetaLocation()">
+<a name="getAssignmentManager()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getMetaLocation</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.152">getMetaLocation</a>()</pre>
+<h4>getAssignmentManager</h4>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.152">getAssignmentManager</a>()</pre>
 </li>
 </ul>
-<a name="getMetaLocation__IsNotDefault()">
+<a name="getAssignmentManager__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getMetaLocation__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.157">getMetaLocation__IsNotDefault</a>()</pre>
+<h4>getAssignmentManager__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.157">getAssignmentManager__IsNotDefault</a>()</pre>
 </li>
 </ul>
-<a name="setServers(java.util.List)">
+<a name="setMetaLocation(org.apache.hadoop.hbase.ServerName)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setServers</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.163">setServers</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;servers)</pre>
+<h4>setMetaLocation</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.163">setMetaLocation</a>(<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;metaLocation)</pre>
 </li>
 </ul>
-<a name="getServers()">
+<a name="getMetaLocation()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getServers</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.169">getServers</a>()</pre>
+<h4>getMetaLocation</h4>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.169">getMetaLocation</a>()</pre>
 </li>
 </ul>
-<a name="getServers__IsNotDefault()">
+<a name="getMetaLocation__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getServers__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.174">getServers__IsNotDefault</a>()</pre>
+<h4>getMetaLocation__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.174">getMetaLocation__IsNotDefault</a>()</pre>
 </li>
 </ul>
-<a name="setDeadServers(java.util.Set)">
+<a name="setFrags(java.util.Map)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setDeadServers</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.180">setDeadServers</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;deadServers)</pre>
+<h4>setFrags</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.180">setFrags</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;frags)</pre>
 </li>
 </ul>
-<a name="getDeadServers()">
+<a name="getFrags()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getDeadServers</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.186">getDeadServers</a>()</pre>
+<h4>getFrags</h4>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.186">getFrags</a>()</pre>
 </li>
 </ul>
-<a name="getDeadServers__IsNotDefault()">
+<a name="getFrags__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getDeadServers__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.191">getDeadServers__IsNotDefault</a>()</pre>
+<h4>getFrags__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.191">getFrags__IsNotDefault</a>()</pre>
 </li>
 </ul>
-<a name="setFilter(java.lang.String)">
+<a name="setServerManager(org.apache.hadoop.hbase.master.ServerManager)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setFilter</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.197">setFilter</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;filter)</pre>
+<h4>setServerManager</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.197">setServerManager</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a>&nbsp;serverManager)</pre>
 </li>
 </ul>
-<a name="getFilter()">
+<a name="getServerManager()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getFilter</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.203">getFilter</a>()</pre>
+<h4>getServerManager</h4>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.203">getServerManager</a>()</pre>
 </li>
 </ul>
-<a name="getFilter__IsNotDefault()">
+<a name="getServerManager__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getFilter__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.208">getFilter__IsNotDefault</a>()</pre>
+<h4>getServerManager__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.208">getServerManager__IsNotDefault</a>()</pre>
 </li>
 </ul>
-<a name="setAssignmentManager(org.apache.hadoop.hbase.master.AssignmentManager)">
+<a name="setDeadServers(java.util.Set)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setAssignmentManager</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.214">setAssignmentManager</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a>&nbsp;assignmentManager)</pre>
+<h4>setDeadServers</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.214">setDeadServers</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;deadServers)</pre>
 </li>
 </ul>
-<a name="getAssignmentManager()">
+<a name="getDeadServers()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getAssignmentManager</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.220">getAssignmentManager</a>()</pre>
+<h4>getDeadServers</h4>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.220">getDeadServers</a>()</pre>
 </li>
 </ul>
-<a name="getAssignmentManager__IsNotDefault()">
+<a name="getDeadServers__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getAssignmentManager__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.225">getAssignmentManager__IsNotDefault</a>()</pre>
+<h4>getDeadServers__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.225">getDeadServers__IsNotDefault</a>()</pre>
 </li>
 </ul>
-<a name="setServerManager(org.apache.hadoop.hbase.master.ServerManager)">
+<a name="setCatalogJanitorEnabled(boolean)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setServerManager</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.231">setServerManager</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a>&nbsp;serverManager)</pre>
+<h4>setCatalogJanitorEnabled</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.231">setCatalogJanitorEnabled</a>(boolean&nbsp;catalogJanitorEnabled)</pre>
 </li>
 </ul>
-<a name="getServerManager()">
+<a name="getCatalogJanitorEnabled()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getServerManager</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.237">getServerManager</a>()</pre>
+<h4>getCatalogJanitorEnabled</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.237">getCatalogJanitorEnabled</a>()</pre>
 </li>
 </ul>
-<a name="getServerManager__IsNotDefault()">
+<a name="getCatalogJanitorEnabled__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getServerManager__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.242">getServerManager__IsNotDefault</a>()</pre>
+<h4>getCatalogJanitorEnabled__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.242">getCatalogJanitorEnabled__IsNotDefault</a>()</pre>
 </li>
 </ul>
-<a name="setFrags(java.util.Map)">
+<a name="setServers(java.util.List)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setFrags</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.248">setFrags</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;frags)</pre>
+<h4>setServers</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.248">setServers</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;servers)</pre>
 </li>
 </ul>
-<a name="getFrags()">
+<a name="getServers()">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>getFrags</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.254">getFrags</a>()</pre>
+<h4>getServers</h4>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.254">getServers</a>()</pre>
 </li>
 </ul>
-<a name="getFrags__IsNotDefault()">
+<a name="getServers__IsNotDefault()">
 <!--   -->
 </a>
 <ul class="blockListLast">
 <li class="blockList">
-<h4>getFrags__IsNotDefault</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.259">getFrags__IsNotDefault</a>()</pre>
+<h4>getServers__IsNotDefault</h4>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html#line.259">getServers__IsNotDefault</a>()</pre>
 </li>
 </ul>
 </li>

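The hunks above only reorder the Jamon-generated accessors of MasterStatusTmpl.ImplData; no behaviour changes. As a rough sketch only (not part of this commit), the proxy setters documented here are typically chained before the status page is rendered; the helper name, its parameters, the no-arg proxy constructor, and the "general" filter value below are illustrative assumptions, not taken from the diff:

    import java.util.List;
    import java.util.Set;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.tmpl.master.MasterStatusTmpl;

    public final class StatusTmplSketch {
      // Hypothetical helper: chains the proxy setters shown in the javadoc above.
      // Each setter returns the MasterStatusTmpl proxy, which is what allows chaining;
      // the caller is assumed to have obtained the server collections elsewhere
      // (e.g. from ServerManager). Rendering the template is intentionally not shown.
      static MasterStatusTmpl configure(List<ServerName> servers,
                                        Set<ServerName> deadServers,
                                        boolean catalogJanitorEnabled) {
        return new MasterStatusTmpl()          // no-arg proxy constructor assumed
            .setServers(servers)
            .setDeadServers(deadServers)
            .setCatalogJanitorEnabled(catalogJanitorEnabled)
            .setFilter("general");             // example filter value
      }
    }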
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
index c971dea..76a14d2 100644
--- a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
@@ -323,13 +323,13 @@ extends org.jamon.AbstractTemplateProxy</pre>
 <!--   -->
 </a>
 <h3>Field Detail</h3>
-<a name="catalogJanitorEnabled">
+<a name="filter">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>catalogJanitorEnabled</h4>
-<pre>protected&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.275">catalogJanitorEnabled</a></pre>
+<h4>filter</h4>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.275">filter</a></pre>
 </li>
 </ul>
 <a name="format">
@@ -341,67 +341,67 @@ extends org.jamon.AbstractTemplateProxy</pre>
 <pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.282">format</a></pre>
 </li>
 </ul>
-<a name="metaLocation">
+<a name="assignmentManager">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>metaLocation</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.289">metaLocation</a></pre>
+<h4>assignmentManager</h4>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.289">assignmentManager</a></pre>
 </li>
 </ul>
-<a name="servers">
+<a name="metaLocation">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>servers</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.296">servers</a></pre>
+<h4>metaLocation</h4>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.296">metaLocation</a></pre>
 </li>
 </ul>
-<a name="deadServers">
+<a name="frags">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>deadServers</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.303">deadServers</a></pre>
+<h4>frags</h4>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.303">frags</a></pre>
 </li>
 </ul>
-<a name="filter">
+<a name="serverManager">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>filter</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.310">filter</a></pre>
+<h4>serverManager</h4>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.310">serverManager</a></pre>
 </li>
 </ul>
-<a name="assignmentManager">
+<a name="deadServers">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>assignmentManager</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.317">assignmentManager</a></pre>
+<h4>deadServers</h4>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.317">deadServers</a></pre>
 </li>
 </ul>
-<a name="serverManager">
+<a name="catalogJanitorEnabled">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>serverManager</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.324">serverManager</a></pre>
+<h4>catalogJanitorEnabled</h4>
+<pre>protected&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.324">catalogJanitorEnabled</a></pre>
 </li>
 </ul>
-<a name="frags">
+<a name="servers">
 <!--   -->
 </a>
 <ul class="blockListLast">
 <li class="blockList">
-<h4>frags</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.331">frags</a></pre>
+<h4>servers</h4>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.331">servers</a></pre>
 </li>
 </ul>
 </li>
@@ -473,13 +473,13 @@ extends org.jamon.AbstractTemplateProxy</pre>
 </dl>
 </li>
 </ul>
-<a name="setCatalogJanitorEnabled(boolean)">
+<a name="setFilter(java.lang.String)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setCatalogJanitorEnabled</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.276">setCatalogJanitorEnabled</a>(boolean&nbsp;p_catalogJanitorEnabled)</pre>
+<h4>setFilter</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.276">setFilter</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_filter)</pre>
 </li>
 </ul>
 <a name="setFormat(java.lang.String)">
@@ -491,67 +491,67 @@ extends org.jamon.AbstractTemplateProxy</pre>
 <pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.283">setFormat</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_format)</pre>
 </li>
 </ul>
-<a name="setMetaLocation(org.apache.hadoop.hbase.ServerName)">
+<a name="setAssignmentManager(org.apache.hadoop.hbase.master.AssignmentManager)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setMetaLocation</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.290">setMetaLocation</a>(<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;p_metaLocation)</pre>
+<h4>setAssignmentManager</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.290">setAssignmentManager</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a>&nbsp;p_assignmentManager)</pre>
 </li>
 </ul>
-<a name="setServers(java.util.List)">
+<a name="setMetaLocation(org.apache.hadoop.hbase.ServerName)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setServers</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.297">setServers</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;p_servers)</pre>
+<h4>setMetaLocation</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.297">setMetaLocation</a>(<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;p_metaLocation)</pre>
 </li>
 </ul>
-<a name="setDeadServers(java.util.Set)">
+<a name="setFrags(java.util.Map)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setDeadServers</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.304">setDeadServers</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;p_deadServers)</pre>
+<h4>setFrags</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.304">setFrags</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;p_frags)</pre>
 </li>
 </ul>
-<a name="setFilter(java.lang.String)">
+<a name="setServerManager(org.apache.hadoop.hbase.master.ServerManager)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setFilter</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.311">setFilter</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;p_filter)</pre>
+<h4>setServerManager</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.311">setServerManager</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a>&nbsp;p_serverManager)</pre>
 </li>
 </ul>
-<a name="setAssignmentManager(org.apache.hadoop.hbase.master.AssignmentManager)">
+<a name="setDeadServers(java.util.Set)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setAssignmentManager</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.318">setAssignmentManager</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a>&nbsp;p_assignmentManager)</pre>
+<h4>setDeadServers</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.318">setDeadServers</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;p_deadServers)</pre>
 </li>
 </ul>
-<a name="setServerManager(org.apache.hadoop.hbase.master.ServerManager)">
+<a name="setCatalogJanitorEnabled(boolean)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setServerManager</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.325">setServerManager</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a>&nbsp;p_serverManager)</pre>
+<h4>setCatalogJanitorEnabled</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.325">setCatalogJanitorEnabled</a>(boolean&nbsp;p_catalogJanitorEnabled)</pre>
 </li>
 </ul>
-<a name="setFrags(java.util.Map)">
+<a name="setServers(java.util.List)">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setFrags</h4>
-<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.332">setFrags</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;p_frags)</pre>
+<h4>setServers</h4>
+<pre>public final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html" title="class in org.apache.hadoop.hbase.tmpl.master">MasterStatusTmpl</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html#line.332">setServers</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;p_servers)</pre>
 </li>
 </ul>
 <a name="constructImpl(java.lang.Class)">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html
index 3af4ba9..03f83df 100644
--- a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html
@@ -264,13 +264,13 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/Master
 <pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.62">master</a></pre>
 </li>
 </ul>
-<a name="catalogJanitorEnabled">
+<a name="filter">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>catalogJanitorEnabled</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.63">catalogJanitorEnabled</a></pre>
+<h4>filter</h4>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.63">filter</a></pre>
 </li>
 </ul>
 <a name="format">
@@ -282,67 +282,67 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/tmpl/master/Master
 <pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.64">format</a></pre>
 </li>
 </ul>
-<a name="metaLocation">
+<a name="assignmentManager">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>metaLocation</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.65">metaLocation</a></pre>
+<h4>assignmentManager</h4>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.65">assignmentManager</a></pre>
 </li>
 </ul>
-<a name="servers">
+<a name="metaLocation">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>servers</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.66">servers</a></pre>
+<h4>metaLocation</h4>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.66">metaLocation</a></pre>
 </li>
 </ul>
-<a name="deadServers">
+<a name="frags">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>deadServers</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.67">deadServers</a></pre>
+<h4>frags</h4>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.67">frags</a></pre>
 </li>
 </ul>
-<a name="filter">
+<a name="serverManager">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>filter</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.68">filter</a></pre>
+<h4>serverManager</h4>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.68">serverManager</a></pre>
 </li>
 </ul>
-<a name="assignmentManager">
+<a name="deadServers">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>assignmentManager</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master">AssignmentManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.69">assignmentManager</a></pre>
+<h4>deadServers</h4>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.69">deadServers</a></pre>
 </li>
 </ul>
-<a name="serverManager">
+<a name="catalogJanitorEnabled">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>serverManager</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/master/ServerManager.html" title="class in org.apache.hadoop.hbase.master">ServerManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.70">serverManager</a></pre>
+<h4>catalogJanitorEnabled</h4>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.70">catalogJanitorEnabled</a></pre>
 </li>
 </ul>
-<a name="frags">
+<a name="servers">
 <!--   -->
 </a>
 <ul class="blockListLast">
 <li class="blockList">
-<h4>frags</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.71">frags</a></pre>
+<h4>servers</h4>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmplImpl.html#line.71">servers</a></pre>
 </li>
 </ul>
 </li>


[43/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/HConstants.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index 57e8154..ac6c3a2 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
@@ -1959,7 +1959,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.71">HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.73">HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM</a></pre>
 <div class="block">The size data structures with minor version is 0</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM">Constant Field Values</a></dd></dl>
 </li>
@@ -1970,7 +1970,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HFILEBLOCK_HEADER_SIZE</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.77">HFILEBLOCK_HEADER_SIZE</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.79">HFILEBLOCK_HEADER_SIZE</a></pre>
 <div class="block">The size of a version 2 HFile block header, minor version 1.
  There is a 1 byte checksum type, followed by a 4 byte bytesPerChecksum
  followed by another 4 byte value to store sizeofDataOnDisk.</div>
@@ -1983,7 +1983,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HFILEBLOCK_DUMMY_HEADER</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.80">HFILEBLOCK_DUMMY_HEADER</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.82">HFILEBLOCK_DUMMY_HEADER</a></pre>
 <div class="block">Just an array of bytes of the right size.</div>
 </li>
 </ul>
@@ -1993,7 +1993,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZERO_L</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.97">ZERO_L</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.99">ZERO_L</a></pre>
 <div class="block">long constant for zero</div>
 </li>
 </ul>
@@ -2003,7 +2003,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>NINES</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.98">NINES</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.100">NINES</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.NINES">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2013,7 +2013,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZEROES</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.99">ZEROES</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.101">ZEROES</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZEROES">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2023,7 +2023,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>VERSION_FILE_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.104">VERSION_FILE_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.106">VERSION_FILE_NAME</a></pre>
 <div class="block">name of version file</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.VERSION_FILE_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2034,7 +2034,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>FILE_SYSTEM_VERSION</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.115">FILE_SYSTEM_VERSION</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.117">FILE_SYSTEM_VERSION</a></pre>
 <div class="block">Current version of file system.
  Version 4 supports only one kind of bloom filter.
  Version 5 changes versions in catalog table regions.
@@ -2050,7 +2050,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLUSTER_DISTRIBUTED</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.122">CLUSTER_DISTRIBUTED</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.124">CLUSTER_DISTRIBUTED</a></pre>
 <div class="block">Cluster is in distributed mode or not</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLUSTER_DISTRIBUTED">Constant Field Values</a></dd></dl>
 </li>
@@ -2061,7 +2061,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_MASTER_LOADBALANCER_CLASS</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.125">HBASE_MASTER_LOADBALANCER_CLASS</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.127">HBASE_MASTER_LOADBALANCER_CLASS</a></pre>
 <div class="block">Config for pluggable load balancers</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOADBALANCER_CLASS">Constant Field Values</a></dd></dl>
 </li>
@@ -2072,7 +2072,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_MASTER_LOADBALANCE_BYTABLE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.128">HBASE_MASTER_LOADBALANCE_BYTABLE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.130">HBASE_MASTER_LOADBALANCE_BYTABLE</a></pre>
 <div class="block">Config for balancing the cluster by table</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOADBALANCE_BYTABLE">Constant Field Values</a></dd></dl>
 </li>
@@ -2083,7 +2083,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ENSEMBLE_TABLE_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.131">ENSEMBLE_TABLE_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.133">ENSEMBLE_TABLE_NAME</a></pre>
 <div class="block">The name of the ensemble table</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ENSEMBLE_TABLE_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2094,7 +2094,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_MASTER_NORMALIZER_CLASS</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.134">HBASE_MASTER_NORMALIZER_CLASS</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.136">HBASE_MASTER_NORMALIZER_CLASS</a></pre>
 <div class="block">Config for pluggable region normalizer</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_MASTER_NORMALIZER_CLASS">Constant Field Values</a></dd></dl>
 </li>
@@ -2105,7 +2105,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLUSTER_IS_LOCAL</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.138">CLUSTER_IS_LOCAL</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.140">CLUSTER_IS_LOCAL</a></pre>
 <div class="block">Cluster is standalone or pseudo-distributed</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLUSTER_IS_LOCAL">Constant Field Values</a></dd></dl>
 </li>
@@ -2116,7 +2116,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLUSTER_IS_DISTRIBUTED</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.141">CLUSTER_IS_DISTRIBUTED</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.143">CLUSTER_IS_DISTRIBUTED</a></pre>
 <div class="block">Cluster is fully-distributed</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLUSTER_IS_DISTRIBUTED">Constant Field Values</a></dd></dl>
 </li>
@@ -2127,7 +2127,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CLUSTER_DISTRIBUTED</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.144">DEFAULT_CLUSTER_DISTRIBUTED</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.146">DEFAULT_CLUSTER_DISTRIBUTED</a></pre>
 <div class="block">Default value for cluster distributed mode</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_CLUSTER_DISTRIBUTED">Constant Field Values</a></dd></dl>
 </li>
@@ -2138,7 +2138,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_HOST</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.147">DEFAULT_HOST</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.149">DEFAULT_HOST</a></pre>
 <div class="block">default host address</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_HOST">Constant Field Values</a></dd></dl>
 </li>
@@ -2149,7 +2149,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MASTER_PORT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.150">MASTER_PORT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.152">MASTER_PORT</a></pre>
 <div class="block">Parameter name for port master listens on.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MASTER_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2160,7 +2160,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MASTER_PORT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.153">DEFAULT_MASTER_PORT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.155">DEFAULT_MASTER_PORT</a></pre>
 <div class="block">default port that the master listens on</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_MASTER_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2171,7 +2171,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MASTER_INFOPORT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.156">DEFAULT_MASTER_INFOPORT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.158">DEFAULT_MASTER_INFOPORT</a></pre>
 <div class="block">default port for master web api</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_MASTER_INFOPORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2182,7 +2182,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MASTER_INFO_PORT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.159">MASTER_INFO_PORT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.161">MASTER_INFO_PORT</a></pre>
 <div class="block">Configuration key for master web API port</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MASTER_INFO_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2193,7 +2193,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MASTER_TYPE_BACKUP</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.162">MASTER_TYPE_BACKUP</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.164">MASTER_TYPE_BACKUP</a></pre>
 <div class="block">Parameter name for the master type being backup (waits for primary to go inactive).</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MASTER_TYPE_BACKUP">Constant Field Values</a></dd></dl>
 </li>
@@ -2204,7 +2204,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MASTER_TYPE_BACKUP</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.167">DEFAULT_MASTER_TYPE_BACKUP</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.169">DEFAULT_MASTER_TYPE_BACKUP</a></pre>
 <div class="block">by default every master is a possible primary master unless the conf explicitly overrides it</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_MASTER_TYPE_BACKUP">Constant Field Values</a></dd></dl>
 </li>
@@ -2215,7 +2215,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_QUORUM</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.170">ZOOKEEPER_QUORUM</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.172">ZOOKEEPER_QUORUM</a></pre>
 <div class="block">Name of ZooKeeper quorum configuration parameter.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_QUORUM">Constant Field Values</a></dd></dl>
 </li>
@@ -2226,7 +2226,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZK_CFG_PROPERTY_PREFIX</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.173">ZK_CFG_PROPERTY_PREFIX</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.175">ZK_CFG_PROPERTY_PREFIX</a></pre>
 <div class="block">Common prefix of ZooKeeper configuration properties</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZK_CFG_PROPERTY_PREFIX">Constant Field Values</a></dd></dl>
 </li>
@@ -2237,7 +2237,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZK_CFG_PROPERTY_PREFIX_LEN</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.176">ZK_CFG_PROPERTY_PREFIX_LEN</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.178">ZK_CFG_PROPERTY_PREFIX_LEN</a></pre>
 </li>
 </ul>
 <a name="CLIENT_PORT_STR">
@@ -2246,7 +2246,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLIENT_PORT_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.183">CLIENT_PORT_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.185">CLIENT_PORT_STR</a></pre>
 <div class="block">The ZK client port key in the ZK properties map. The name reflects the
  fact that this is not an HBase configuration key.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLIENT_PORT_STR">Constant Field Values</a></dd></dl>
@@ -2258,7 +2258,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_CLIENT_PORT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.186">ZOOKEEPER_CLIENT_PORT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.188">ZOOKEEPER_CLIENT_PORT</a></pre>
 <div class="block">Parameter name for the client port that the zookeeper listens on</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_CLIENT_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2269,7 +2269,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ZOOKEPER_CLIENT_PORT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.190">DEFAULT_ZOOKEPER_CLIENT_PORT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.192">DEFAULT_ZOOKEPER_CLIENT_PORT</a></pre>
 <div class="block">Default client port that the zookeeper listens on</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_ZOOKEPER_CLIENT_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2280,7 +2280,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_RECOVERABLE_WAITTIME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.195">ZOOKEEPER_RECOVERABLE_WAITTIME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.197">ZOOKEEPER_RECOVERABLE_WAITTIME</a></pre>
 <div class="block">Parameter name for the wait time for the recoverable zookeeper</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_RECOVERABLE_WAITTIME">Constant Field Values</a></dd></dl>
 </li>
@@ -2291,7 +2291,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME</h4>
-<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.199">DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.201">DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME</a></pre>
 <div class="block">Default wait time for the recoverable zookeeper</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME">Constant Field Values</a></dd></dl>
 </li>
@@ -2302,7 +2302,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_ZNODE_PARENT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.202">ZOOKEEPER_ZNODE_PARENT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.204">ZOOKEEPER_ZNODE_PARENT</a></pre>
 <div class="block">Parameter name for the root dir in ZK for this cluster</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_ZNODE_PARENT">Constant Field Values</a></dd></dl>
 </li>
@@ -2313,7 +2313,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ZOOKEEPER_ZNODE_PARENT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.204">DEFAULT_ZOOKEEPER_ZNODE_PARENT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.206">DEFAULT_ZOOKEEPER_ZNODE_PARENT</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2323,7 +2323,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_MAX_CLIENT_CNXNS</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.210">ZOOKEEPER_MAX_CLIENT_CNXNS</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.212">ZOOKEEPER_MAX_CLIENT_CNXNS</a></pre>
 <div class="block">Parameter name for the limit on concurrent client-side zookeeper
  connections</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_MAX_CLIENT_CNXNS">Constant Field Values</a></dd></dl>
@@ -2335,7 +2335,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_DATA_DIR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.214">ZOOKEEPER_DATA_DIR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.216">ZOOKEEPER_DATA_DIR</a></pre>
 <div class="block">Parameter name for the ZK data directory</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_DATA_DIR">Constant Field Values</a></dd></dl>
 </li>
@@ -2346,7 +2346,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_TICK_TIME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.218">ZOOKEEPER_TICK_TIME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.220">ZOOKEEPER_TICK_TIME</a></pre>
 <div class="block">Parameter name for the ZK tick time</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_TICK_TIME">Constant Field Values</a></dd></dl>
 </li>
@@ -2357,7 +2357,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.222">DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.224">DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS</a></pre>
 <div class="block">Default limit on concurrent client-side zookeeper connections</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_ZOOKEPER_MAX_CLIENT_CNXNS">Constant Field Values</a></dd></dl>
 </li>
@@ -2368,7 +2368,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZK_SESSION_TIMEOUT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.225">ZK_SESSION_TIMEOUT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.227">ZK_SESSION_TIMEOUT</a></pre>
 <div class="block">Configuration key for ZooKeeper session timeout</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZK_SESSION_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
@@ -2379,7 +2379,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ZK_SESSION_TIMEOUT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.228">DEFAULT_ZK_SESSION_TIMEOUT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.230">DEFAULT_ZK_SESSION_TIMEOUT</a></pre>
 <div class="block">Default value for ZooKeeper session timeout</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_ZK_SESSION_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
@@ -2390,7 +2390,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ZOOKEEPER_USEMULTI</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.231">ZOOKEEPER_USEMULTI</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.233">ZOOKEEPER_USEMULTI</a></pre>
 <div class="block">Configuration key for whether to use ZK.multi</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.ZOOKEEPER_USEMULTI">Constant Field Values</a></dd></dl>
 </li>
@@ -2401,7 +2401,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>REGIONSERVER_PORT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.234">REGIONSERVER_PORT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.236">REGIONSERVER_PORT</a></pre>
 <div class="block">Parameter name for port region server listens on.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.REGIONSERVER_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2412,7 +2412,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_REGIONSERVER_PORT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.237">DEFAULT_REGIONSERVER_PORT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.239">DEFAULT_REGIONSERVER_PORT</a></pre>
 <div class="block">Default port region server listens on.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_REGIONSERVER_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2423,7 +2423,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_REGIONSERVER_INFOPORT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.240">DEFAULT_REGIONSERVER_INFOPORT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.242">DEFAULT_REGIONSERVER_INFOPORT</a></pre>
 <div class="block">default port for region server web api</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_REGIONSERVER_INFOPORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2434,7 +2434,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>REGIONSERVER_INFO_PORT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.243">REGIONSERVER_INFO_PORT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.245">REGIONSERVER_INFO_PORT</a></pre>
 <div class="block">A configuration key for regionserver info port</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.REGIONSERVER_INFO_PORT">Constant Field Values</a></dd></dl>
 </li>
@@ -2445,7 +2445,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>REGIONSERVER_INFO_PORT_AUTO</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.247">REGIONSERVER_INFO_PORT_AUTO</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.249">REGIONSERVER_INFO_PORT_AUTO</a></pre>
 <div class="block">A flag that enables automatic selection of regionserver info port</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.REGIONSERVER_INFO_PORT_AUTO">Constant Field Values</a></dd></dl>
 </li>
@@ -2456,7 +2456,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>REGION_SERVER_IMPL</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.251">REGION_SERVER_IMPL</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.253">REGION_SERVER_IMPL</a></pre>
 <div class="block">Parameter name for what region server implementation to use.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.REGION_SERVER_IMPL">Constant Field Values</a></dd></dl>
 </li>
@@ -2467,7 +2467,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MASTER_IMPL</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.254">MASTER_IMPL</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.256">MASTER_IMPL</a></pre>
 <div class="block">Parameter name for what master implementation to use.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MASTER_IMPL">Constant Field Values</a></dd></dl>
 </li>
@@ -2478,7 +2478,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASECLIENT_IMPL</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.257">HBASECLIENT_IMPL</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.259">HBASECLIENT_IMPL</a></pre>
 <div class="block">Parameter name for what hbase client implementation to use.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASECLIENT_IMPL">Constant Field Values</a></dd></dl>
 </li>
@@ -2489,7 +2489,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>THREAD_WAKE_FREQUENCY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.260">THREAD_WAKE_FREQUENCY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.262">THREAD_WAKE_FREQUENCY</a></pre>
 <div class="block">Parameter name for how often threads should wake up</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.THREAD_WAKE_FREQUENCY">Constant Field Values</a></dd></dl>
 </li>
@@ -2500,7 +2500,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_THREAD_WAKE_FREQUENCY</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.263">DEFAULT_THREAD_WAKE_FREQUENCY</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.265">DEFAULT_THREAD_WAKE_FREQUENCY</a></pre>
 <div class="block">Default value for thread wake frequency</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_THREAD_WAKE_FREQUENCY">Constant Field Values</a></dd></dl>
 </li>
@@ -2511,7 +2511,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>VERSION_FILE_WRITE_ATTEMPTS</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.266">VERSION_FILE_WRITE_ATTEMPTS</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.268">VERSION_FILE_WRITE_ATTEMPTS</a></pre>
 <div class="block">Parameter name for how often we should try to write a version file, before failing</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.VERSION_FILE_WRITE_ATTEMPTS">Constant Field Values</a></dd></dl>
 </li>
@@ -2522,7 +2522,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_VERSION_FILE_WRITE_ATTEMPTS</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.269">DEFAULT_VERSION_FILE_WRITE_ATTEMPTS</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.271">DEFAULT_VERSION_FILE_WRITE_ATTEMPTS</a></pre>
 <div class="block">Parameter name for how often we should try to write a version file, before failing</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS">Constant Field Values</a></dd></dl>
 </li>
@@ -2533,7 +2533,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MAJOR_COMPACTION_PERIOD</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.272">MAJOR_COMPACTION_PERIOD</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.274">MAJOR_COMPACTION_PERIOD</a></pre>
 <div class="block">Parameter name for how often a region should should perform a major compaction</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MAJOR_COMPACTION_PERIOD">Constant Field Values</a></dd></dl>
 </li>
@@ -2544,7 +2544,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>COMPACTION_KV_MAX</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.275">COMPACTION_KV_MAX</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.277">COMPACTION_KV_MAX</a></pre>
 <div class="block">Parameter name for the maximum batch of KVs to be used in flushes and compactions</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.COMPACTION_KV_MAX">Constant Field Values</a></dd></dl>
 </li>
@@ -2555,7 +2555,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>COMPACTION_KV_MAX_DEFAULT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.276">COMPACTION_KV_MAX_DEFAULT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.278">COMPACTION_KV_MAX_DEFAULT</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.COMPACTION_KV_MAX_DEFAULT">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2565,7 +2565,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_DIR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.279">HBASE_DIR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.281">HBASE_DIR</a></pre>
 <div class="block">Parameter name for HBase instance root directory</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_DIR">Constant Field Values</a></dd></dl>
 </li>
@@ -2576,7 +2576,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_CLIENT_IPC_POOL_TYPE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.282">HBASE_CLIENT_IPC_POOL_TYPE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.284">HBASE_CLIENT_IPC_POOL_TYPE</a></pre>
 <div class="block">Parameter name for HBase client IPC pool type</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_IPC_POOL_TYPE">Constant Field Values</a></dd></dl>
 </li>
@@ -2587,7 +2587,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_CLIENT_IPC_POOL_SIZE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.285">HBASE_CLIENT_IPC_POOL_SIZE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.287">HBASE_CLIENT_IPC_POOL_SIZE</a></pre>
 <div class="block">Parameter name for HBase client IPC pool size</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_IPC_POOL_SIZE">Constant Field Values</a></dd></dl>
 </li>
@@ -2598,7 +2598,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_CLIENT_OPERATION_TIMEOUT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.288">HBASE_CLIENT_OPERATION_TIMEOUT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.290">HBASE_CLIENT_OPERATION_TIMEOUT</a></pre>
 <div class="block">Parameter name for HBase client operation timeout, which overrides RPC timeout</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_OPERATION_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
@@ -2609,7 +2609,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBASE_CLIENT_META_OPERATION_TIMEOUT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.291">HBASE_CLIENT_META_OPERATION_TIMEOUT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.293">HBASE_CLIENT_META_OPERATION_TIMEOUT</a></pre>
 <div class="block">Parameter name for HBase client operation timeout, which overrides RPC timeout</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_META_OPERATION_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
@@ -2620,7 +2620,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.295">DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.297">DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT</a></pre>
 <div class="block">Default HBase client operation timeout, which is tantamount to a blocking call</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT">Constant Field Values</a></dd></dl>
 </li>
@@ -2631,7 +2631,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_LOGDIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.298">HREGION_LOGDIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.300">HREGION_LOGDIR_NAME</a></pre>
 <div class="block">Used to construct the name of the log directory for a region server</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_LOGDIR_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2642,7 +2642,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>SPLIT_LOGDIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.301">SPLIT_LOGDIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.303">SPLIT_LOGDIR_NAME</a></pre>
 <div class="block">Used to construct the name of the splitlog directory for a region server</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.SPLIT_LOGDIR_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2653,7 +2653,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_OLDLOGDIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.304">HREGION_OLDLOGDIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.306">HREGION_OLDLOGDIR_NAME</a></pre>
 <div class="block">Like the previous, but for old logs that are about to be deleted</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_OLDLOGDIR_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2664,7 +2664,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CORRUPT_DIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.306">CORRUPT_DIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.308">CORRUPT_DIR_NAME</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CORRUPT_DIR_NAME">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2674,7 +2674,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HBCK_SIDELINEDIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.309">HBCK_SIDELINEDIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.311">HBCK_SIDELINEDIR_NAME</a></pre>
 <div class="block">Used by HBCK to sideline backup data</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HBCK_SIDELINEDIR_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2685,7 +2685,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MIGRATION_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.312">MIGRATION_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.314">MIGRATION_NAME</a></pre>
 <div class="block">Any artifacts left from migration can be moved here</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MIGRATION_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2696,7 +2696,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>LIB_DIR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.319">LIB_DIR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.321">LIB_DIR</a></pre>
 <div class="block">The directory from which co-processor/custom filter jars can be loaded
  dynamically by the region servers. This value can be overridden by the
  hbase.dynamic.jars.dir config.</div>
@@ -2709,7 +2709,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_COMPACTIONDIR_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.322">HREGION_COMPACTIONDIR_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.324">HREGION_COMPACTIONDIR_NAME</a></pre>
 <div class="block">Used to construct the name of the compaction directory during compaction</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_COMPACTIONDIR_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2720,7 +2720,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_MAX_FILESIZE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.325">HREGION_MAX_FILESIZE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.327">HREGION_MAX_FILESIZE</a></pre>
 <div class="block">Conf key for the max file size after which we split the region</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_MAX_FILESIZE">Constant Field Values</a></dd></dl>
 </li>
@@ -2731,7 +2731,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MAX_FILE_SIZE</h4>
-<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.329">DEFAULT_MAX_FILE_SIZE</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.331">DEFAULT_MAX_FILE_SIZE</a></pre>
 <div class="block">Default maximum file size</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_MAX_FILE_SIZE">Constant Field Values</a></dd></dl>
 </li>
@@ -2742,7 +2742,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE_MAX_ROWSIZE_KEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.334">TABLE_MAX_ROWSIZE_KEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.336">TABLE_MAX_ROWSIZE_KEY</a></pre>
 <div class="block">Max size of single row for Get's or Scan's without in-row scanning flag set.</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.TABLE_MAX_ROWSIZE_KEY">Constant Field Values</a></dd></dl>
 </li>
@@ -2753,7 +2753,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE_MAX_ROWSIZE_DEFAULT</h4>
-<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.339">TABLE_MAX_ROWSIZE_DEFAULT</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.341">TABLE_MAX_ROWSIZE_DEFAULT</a></pre>
 <div class="block">Default max row size (1 Gb).</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.TABLE_MAX_ROWSIZE_DEFAULT">Constant Field Values</a></dd></dl>
 </li>
@@ -2764,7 +2764,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HSTORE_OPEN_AND_CLOSE_THREADS_MAX</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.345">HSTORE_OPEN_AND_CLOSE_THREADS_MAX</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.347">HSTORE_OPEN_AND_CLOSE_THREADS_MAX</a></pre>
 <div class="block">The max number of threads used for opening and closing stores or store
  files in parallel</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HSTORE_OPEN_AND_CLOSE_THREADS_MAX">Constant Field Values</a></dd></dl>
@@ -2776,7 +2776,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.352">DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.354">DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX</a></pre>
 <div class="block">The default number for the max number of threads used for opening and
  closing stores or store files in parallel</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX">Constant Field Values</a></dd></dl>
@@ -2788,7 +2788,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_MEMSTORE_BLOCK_MULTIPLIER</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.359">HREGION_MEMSTORE_BLOCK_MULTIPLIER</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.361">HREGION_MEMSTORE_BLOCK_MULTIPLIER</a></pre>
 <div class="block">Block updates if memstore has hbase.hregion.memstore.block.multiplier
  times hbase.hregion.memstore.flush.size bytes. Useful for preventing
  runaway memstore during spikes in update traffic.</div>
@@ -2801,7 +2801,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.365">DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.367">DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER</a></pre>
 <div class="block">Default value for hbase.hregion.memstore.block.multiplier</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER">Constant Field Values</a></dd></dl>
 </li>
@@ -2812,7 +2812,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_MEMSTORE_FLUSH_SIZE</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.368">HREGION_MEMSTORE_FLUSH_SIZE</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.370">HREGION_MEMSTORE_FLUSH_SIZE</a></pre>
 <div class="block">Conf key for the memstore size at which we flush the memstore</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_MEMSTORE_FLUSH_SIZE">Constant Field Values</a></dd></dl>
 </li>
@@ -2823,7 +2823,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>HREGION_EDITS_REPLAY_SKIP_ERRORS</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.371">HREGION_EDITS_REPLAY_SKIP_ERRORS</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.373">HREGION_EDITS_REPLAY_SKIP_ERRORS</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2833,7 +2833,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.374">DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.376">DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2843,7 +2843,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MAXIMUM_VALUE_LENGTH</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.378">MAXIMUM_VALUE_LENGTH</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.380">MAXIMUM_VALUE_LENGTH</a></pre>
 <div class="block">Maximum value length, enforced on KeyValue construction</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MAXIMUM_VALUE_LENGTH">Constant Field Values</a></dd></dl>
 </li>
@@ -2854,7 +2854,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLUSTER_ID_FILE_NAME</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.381">CLUSTER_ID_FILE_NAME</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.383">CLUSTER_ID_FILE_NAME</a></pre>
 <div class="block">name of the file for unique cluster ID</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLUSTER_ID_FILE_NAME">Constant Field Values</a></dd></dl>
 </li>
@@ -2865,7 +2865,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>CLUSTER_ID_DEFAULT</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.384">CLUSTER_ID_DEFAULT</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.386">CLUSTER_ID_DEFAULT</a></pre>
 <div class="block">Default value for cluster ID</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CLUSTER_ID_DEFAULT">Constant Field Values</a></dd></dl>
 </li>
@@ -2876,7 +2876,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>KEEP_SEQID_PERIOD</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.387">KEEP_SEQID_PERIOD</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.389">KEEP_SEQID_PERIOD</a></pre>
 <div class="block">Parameter name for # days to keep MVCC values during a major compaction</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.KEEP_SEQID_PERIOD">Constant Field Values</a></dd></dl>
 </li>
@@ -2887,7 +2887,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>MIN_KEEP_SEQID_PERIOD</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.389">MIN_KEEP_SEQID_PERIOD</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.391">MIN_KEEP_SEQID_PERIOD</a></pre>
 <div class="block">At least to keep MVCC values in hfiles for 5 days</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.MIN_KEEP_SEQID_PERIOD">Constant Field Values</a></dd></dl>
 </li>
@@ -2899,7 +2899,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <li class="blockList">
 <h4>META_TABLE_NAME</h4>
 <pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.417">META_TABLE_NAME</a></pre>
+public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.419">META_TABLE_NAME</a></pre>
 <div class="block"><span class="strong">Deprecated.</span>&nbsp;<i>For upgrades of 0.94 to 0.96</i></div>
 <div class="block">The hbase:meta table's name.</div>
 </li>
@@ -2910,7 +2910,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>BASE_NAMESPACE_DIR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.419">BASE_NAMESPACE_DIR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.421">BASE_NAMESPACE_DIR</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.BASE_NAMESPACE_DIR">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -2920,7 +2920,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>META_ROW_DELIMITER</h4>
-<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.422">META_ROW_DELIMITER</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.424">META_ROW_DELIMITER</a></pre>
 <div class="block">delimiter used between portions of a region name</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.META_ROW_DELIMITER">Constant Field Values</a></dd></dl>
 </li>
@@ -2931,7 +2931,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>CATALOG_FAMILY_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.425">CATALOG_FAMILY_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.427">CATALOG_FAMILY_STR</a></pre>
 <div class="block">The catalog family as a string</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.CATALOG_FAMILY_STR">Constant Field Values</a></dd></dl>
 </li>
@@ -2942,7 +2942,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>CATALOG_FAMILY</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.428">CATALOG_FAMILY</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.430">CATALOG_FAMILY</a></pre>
 <div class="block">The catalog family</div>
 </li>
 </ul>
@@ -2952,7 +2952,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>REGIONINFO_QUALIFIER_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.431">REGIONINFO_QUALIFIER_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.433">REGIONINFO_QUALIFIER_STR</a></pre>
 <div class="block">The RegionInfo qualifier as a string</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.REGIONINFO_QUALIFIER_STR">Constant Field Values</a></dd></dl>
 </li>
@@ -2963,7 +2963,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>REGIONINFO_QUALIFIER</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.434">REGIONINFO_QUALIFIER</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.436">REGIONINFO_QUALIFIER</a></pre>
 <div class="block">The regioninfo column qualifier</div>
 </li>
 </ul>
@@ -2973,7 +2973,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>SERVER_QUALIFIER_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.437">SERVER_QUALIFIER_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.439">SERVER_QUALIFIER_STR</a></pre>
 <div class="block">The server column qualifier</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.SERVER_QUALIFIER_STR">Constant Field Values</a></dd></dl>
 </li>
@@ -2984,7 +2984,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>SERVER_QUALIFIER</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.439">SERVER_QUALIFIER</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.441">SERVER_QUALIFIER</a></pre>
 <div class="block">The server column qualifier</div>
 </li>
 </ul>
@@ -2994,7 +2994,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>STARTCODE_QUALIFIER_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.442">STARTCODE_QUALIFIER_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.444">STARTCODE_QUALIFIER_STR</a></pre>
 <div class="block">The startcode column qualifier</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.STARTCODE_QUALIFIER_STR">Constant Field Values</a></dd></dl>
 </li>
@@ -3005,7 +3005,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>STARTCODE_QUALIFIER</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.444">STARTCODE_QUALIFIER</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.446">STARTCODE_QUALIFIER</a></pre>
 <div class="block">The startcode column qualifier</div>
 </li>
 </ul>
@@ -3015,7 +3015,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>SEQNUM_QUALIFIER_STR</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.447">SEQNUM_QUALIFIER_STR</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.html#line.449">SEQNUM_QUALIFIER_STR</a></pre>
 <div class="block">The open seqnum column qualifier</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../constant-values.html#org.apache.hadoop.hbase.HConstants.SEQNUM_QUALIFIER_STR">Constant Field Values</a></dd></dl>
 </li>
@@ -3026,7 +3026,7 @@ public static final&nbsp;byte[] <a href="../../../../src-html/org/apache/hado

<TRUNCATED>

[35/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
index 68bcec6..0ab9f9f 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html
@@ -99,7 +99,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.785">HFileBlock.Writer</a>
+<pre>static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.822">HFileBlock.Writer</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Unified version 2 <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> block writer. The intended usage pattern
  is as follows:
@@ -107,7 +107,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
  <li>Construct an <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock.Writer</code></a>, providing a compression algorithm.
  <li>Call <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#startWriting(org.apache.hadoop.hbase.io.hfile.BlockType)"><code>startWriting(org.apache.hadoop.hbase.io.hfile.BlockType)</code></a> and get a data stream to write to.
  <li>Write your data into the stream.
- <li>Call <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#writeHeaderAndData(org.apache.hadoop.fs.FSDataOutputStream)"><code>writeHeaderAndData(FSDataOutputStream)</code></a> as many times as you need to.
+ <li>Call Writer#writeHeaderAndData(FSDataOutputStream) as many times as you need to
  store the serialized block into an external stream.
  <li>Repeat to write more blocks.
  </ol>
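
A minimal sketch of the usage pattern listed above, assuming package-private access (several of these members are package-private per this page) and assuming NoOpDataBlockEncoder.INSTANCE, HFileContextBuilder, fs, path and payloads come from the surrounding codebase or caller; none of them are defined on this page:

  HFileContext fileContext = new HFileContextBuilder().build();
  HFileBlock.Writer writer = new HFileBlock.Writer(NoOpDataBlockEncoder.INSTANCE, fileContext);
  try (FSDataOutputStream out = fs.create(path)) {
    for (byte[] payload : payloads) {
      DataOutputStream dos = writer.startWriting(BlockType.DATA); // previous block's data is discarded
      dos.write(payload);                                         // write the uncompressed block body
      writer.writeHeaderAndData(out);                             // header + (possibly compressed) data + checksums
    }
    writer.release();                                             // free the writer's buffers when done
  }
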
@@ -271,7 +271,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <th class="colLast" scope="col">Method and Description</th>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>int</code></td>
+<td class="colFirst"><code>(package private) int</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#blockSizeWritten()">blockSizeWritten</a></strong>()</code>
 <div class="block">Returns the number of bytes written into the current block so far, or
  zero if not writing the block at the moment.</div>
@@ -304,7 +304,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
+<td class="colFirst"><code>(package private) <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#getBlockForCaching(org.apache.hadoop.hbase.io.hfile.CacheConfig)">getBlockForCaching</a></strong>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</code>
 <div class="block">Creates a new HFileBlock.</div>
 </td>
@@ -360,7 +360,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>boolean</code></td>
+<td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#isWriting()">isWriting</a></strong>()</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
@@ -374,25 +374,25 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>void</code></td>
+<td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#release()">release</a></strong>()</code>
 <div class="block">Releases resources used by this writer.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a></code></td>
+<td class="colFirst"><code>(package private) <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#startWriting(org.apache.hadoop.hbase.io.hfile.BlockType)">startWriting</a></strong>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;newBlockType)</code>
 <div class="block">Starts writing into the block.</div>
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>void</code></td>
+<td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#write(org.apache.hadoop.hbase.Cell)">write</a></strong>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)</code>
 <div class="block">Writes the Cell to this block</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>void</code></td>
+<td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#writeBlock(org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable,%20org.apache.hadoop.fs.FSDataOutputStream)">writeBlock</a></strong>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockWritable</a>&nbsp;bw,
                     org.apache.hadoop.fs.FSDataOutputStream&nbsp;out)</code>
 <div class="block">Takes the given <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock.BlockWritable</code></a> instance, creates a new block of
@@ -401,7 +401,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>void</code></td>
+<td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#writeHeaderAndData(org.apache.hadoop.fs.FSDataOutputStream)">writeHeaderAndData</a></strong>(org.apache.hadoop.fs.FSDataOutputStream&nbsp;out)</code>
 <div class="block">Similar to <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#writeHeaderAndData(org.apache.hadoop.fs.FSDataOutputStream)"><code>writeHeaderAndData(FSDataOutputStream)</code></a>, but records
  the offset of this block so that it can be referenced in the next block
@@ -436,7 +436,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>state</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.794">state</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.830">state</a></pre>
 <div class="block">Writer state. Used to ensure the correct usage protocol.</div>
 </li>
 </ul>
@@ -446,7 +446,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>dataBlockEncoder</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.797">dataBlockEncoder</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.833">dataBlockEncoder</a></pre>
 <div class="block">Data block encoder used for data blocks</div>
 </li>
 </ul>
@@ -456,7 +456,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>dataBlockEncodingCtx</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.799">dataBlockEncodingCtx</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockEncodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.835">dataBlockEncodingCtx</a></pre>
 </li>
 </ul>
 <a name="defaultBlockEncodingCtx">
@@ -465,7 +465,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>defaultBlockEncodingCtx</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultEncodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.802">defaultBlockEncodingCtx</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultEncodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.838">defaultBlockEncodingCtx</a></pre>
 <div class="block">block encoding context for non-data blocks</div>
 </li>
 </ul>
@@ -475,7 +475,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>baosInMemory</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/ByteArrayOutputStream.html" title="class in org.apache.hadoop.hbase.io">ByteArrayOutputStream</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.810">baosInMemory</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/ByteArrayOutputStream.html" title="class in org.apache.hadoop.hbase.io">ByteArrayOutputStream</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.846">baosInMemory</a></pre>
 <div class="block">The stream we use to accumulate data in uncompressed format for each
  block. We reset this stream at the end of each block and reuse it. The
  header is written as the first <a href="../../../../../../org/apache/hadoop/hbase/HConstants.html#HFILEBLOCK_HEADER_SIZE"><code>HConstants.HFILEBLOCK_HEADER_SIZE</code></a> bytes into this
@@ -488,7 +488,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>blockType</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.817">blockType</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.853">blockType</a></pre>
 <div class="block">Current block type. Set in <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#startWriting(org.apache.hadoop.hbase.io.hfile.BlockType)"><code>startWriting(BlockType)</code></a>. Could be
  changed in <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#finishBlock()"><code>finishBlock()</code></a> from <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#DATA"><code>BlockType.DATA</code></a>
  to <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a>.</div>
@@ -500,7 +500,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>userDataStream</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.823">userDataStream</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.859">userDataStream</a></pre>
 <div class="block">A stream that we write uncompressed bytes to, which compresses them and
  writes them to <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#baosInMemory"><code>baosInMemory</code></a>.</div>
 </li>
@@ -511,7 +511,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>unencodedDataSizeWritten</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.827">unencodedDataSizeWritten</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.863">unencodedDataSizeWritten</a></pre>
 </li>
 </ul>
 <a name="onDiskBytesWithHeader">
@@ -520,7 +520,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>onDiskBytesWithHeader</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.834">onDiskBytesWithHeader</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.870">onDiskBytesWithHeader</a></pre>
 <div class="block">Bytes to be written to the file system, including the header. Compressed
  if compression is turned on. It also includes the checksum data that
  immediately follows the block data. (header + data + checksums)</div>
@@ -532,7 +532,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>onDiskChecksum</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.842">onDiskChecksum</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.878">onDiskChecksum</a></pre>
 <div class="block">The size of the checksum data on disk. It is used only if data is
  not compressed. If data is compressed, then the checksums are already
  part of onDiskBytesWithHeader. If data is uncompressed, then this
@@ -545,7 +545,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>uncompressedBytesWithHeader</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.851">uncompressedBytesWithHeader</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.887">uncompressedBytesWithHeader</a></pre>
 <div class="block">Valid in the READY state. Contains the header and the uncompressed (but
  potentially encoded, if this is a data block) bytes, so the length is
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html#uncompressedSizeWithoutHeader"><code>HFileBlock.uncompressedSizeWithoutHeader</code></a> +
@@ -559,7 +559,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>startOffset</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.857">startOffset</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.893">startOffset</a></pre>
 <div class="block">Current block's start offset in the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>. Set in
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#writeHeaderAndData(org.apache.hadoop.fs.FSDataOutputStream)"><code>writeHeaderAndData(FSDataOutputStream)</code></a>.</div>
 </li>
@@ -570,7 +570,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>prevOffsetByType</h4>
-<pre>private&nbsp;long[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.863">prevOffsetByType</a></pre>
+<pre>private&nbsp;long[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.899">prevOffsetByType</a></pre>
 <div class="block">Offset of previous block by block type. Updated when the next block is
  started.</div>
 </li>
@@ -581,7 +581,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>prevOffset</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.866">prevOffset</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.902">prevOffset</a></pre>
 <div class="block">The offset of the previous block of the same type</div>
 </li>
 </ul>
@@ -591,7 +591,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fileContext</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.868">fileContext</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.904">fileContext</a></pre>
 <div class="block">Meta data that holds information about the hfileblock</div>
 </li>
 </ul>
@@ -609,7 +609,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFileBlock.Writer</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.873">HFileBlock.Writer</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;dataBlockEncoder,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.909">HFileBlock.Writer</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;dataBlockEncoder,
                  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext)</pre>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>dataBlockEncoder</code> - data block encoding algorithm to use</dd></dl>
 </li>
@@ -628,8 +628,8 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>startWriting</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.902">startWriting</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;newBlockType)
-                              throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.939">startWriting</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;newBlockType)
+                        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Starts writing into the block. The previous block's data is discarded.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the stream the user can write their data into</dd>
 <dt><span class="strong">Throws:</span></dt>
@@ -642,8 +642,8 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>write</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.932">write</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
-           throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.969">write</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
+     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Writes the Cell to this block</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>cell</code> - </dd>
 <dt><span class="strong">Throws:</span></dt>
@@ -656,7 +656,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getUserDataStream</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.945">getUserDataStream</a>()</pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.982">getUserDataStream</a>()</pre>
 <div class="block">Returns the stream for the user to write to. The block writer takes care
  of handling compression and buffering for caching on write. Can only be
  called in the "writing" state.</div>
@@ -669,7 +669,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>ensureBlockReady</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.954">ensureBlockReady</a>()
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.991">ensureBlockReady</a>()
                 throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Transitions the block writer from the "writing" state to the "block
  ready" state.  Does nothing if a block is already finished.</div>
@@ -683,7 +683,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>finishBlock</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.971">finishBlock</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1008">finishBlock</a>()
                   throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">An internal method that flushes the compressing stream (if using
  compression), serializes the header, and takes care of the separate
@@ -699,7 +699,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>putHeader</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1020">putHeader</a>(byte[]&nbsp;dest,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1061">putHeader</a>(byte[]&nbsp;dest,
              int&nbsp;offset,
              int&nbsp;onDiskSize,
              int&nbsp;uncompressedSize,
@@ -716,8 +716,8 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>writeHeaderAndData</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1039">writeHeaderAndData</a>(org.apache.hadoop.fs.FSDataOutputStream&nbsp;out)
-                        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1080">writeHeaderAndData</a>(org.apache.hadoop.fs.FSDataOutputStream&nbsp;out)
+                  throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Similar to <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#writeHeaderAndData(org.apache.hadoop.fs.FSDataOutputStream)"><code>writeHeaderAndData(FSDataOutputStream)</code></a>, but records
  the offset of this block so that it can be referenced in the next block
  of the same type.</div>
@@ -732,7 +732,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>finishBlockAndWriteHeaderAndData</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1060">finishBlockAndWriteHeaderAndData</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1101">finishBlockAndWriteHeaderAndData</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
                                          throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Writes the header and the compressed data of this block (or uncompressed
  data when not using compression) into the given stream. Can be called in
@@ -749,7 +749,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getHeaderAndDataForTest</h4>
-<pre>byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1077">getHeaderAndDataForTest</a>()
+<pre>byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1118">getHeaderAndDataForTest</a>()
                          throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns the header or the compressed data (or uncompressed data when not
  using compression) as a byte array. Can be called in the "writing" state
@@ -767,7 +767,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>release</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1094">release</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1135">release</a>()</pre>
 <div class="block">Releases resources used by this writer.</div>
 </li>
 </ul>
@@ -777,7 +777,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskSizeWithoutHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1113">getOnDiskSizeWithoutHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1154">getOnDiskSizeWithoutHeader</a>()</pre>
 <div class="block">Returns the on-disk size of the data portion of the block. This is the
  compressed size if compression is enabled. Can only be called in the
  "block ready" state. Header is not compressed, and its size is not
@@ -791,7 +791,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskSizeWithHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1127">getOnDiskSizeWithHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1167">getOnDiskSizeWithHeader</a>()</pre>
 <div class="block">Returns the on-disk size of the block. Can only be called in the
  "block ready" state.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the on-disk size of the block ready to be written, including the
@@ -804,7 +804,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getUncompressedSizeWithoutHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1135">getUncompressedSizeWithoutHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1175">getUncompressedSizeWithoutHeader</a>()</pre>
 <div class="block">The uncompressed size of the block data. Does not include header size.</div>
 </li>
 </ul>
@@ -814,7 +814,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getUncompressedSizeWithHeader</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1143">getUncompressedSizeWithHeader</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1183">getUncompressedSizeWithHeader</a>()</pre>
 <div class="block">The uncompressed size of the block data, including header size.</div>
 </li>
 </ul>
@@ -824,7 +824,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isWriting</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1149">isWriting</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1189">isWriting</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>true if a block is being written</dd></dl>
 </li>
 </ul>
@@ -834,7 +834,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>blockSizeWritten</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1160">blockSizeWritten</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1200">blockSizeWritten</a>()</pre>
 <div class="block">Returns the number of bytes written into the current block so far, or
  zero if not writing the block at the moment. Note that this will return
  zero in the "block ready" state as well.</div>
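
For context, a hedged sketch of how this accessor is typically consulted by a block-writing loop; the threshold (fileContext.getBlocksize()) and the surrounding writer/out variables are assumptions, not shown on this page:

  if (writer.isWriting() && writer.blockSizeWritten() >= fileContext.getBlocksize()) {
    writer.writeHeaderAndData(out);        // flush the full block to the output stream
    writer.startWriting(BlockType.DATA);   // and begin accumulating the next one
  }
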
@@ -847,7 +847,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getUncompressedBufferWithHeader</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1173">getUncompressedBufferWithHeader</a>()</pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1213">getUncompressedBufferWithHeader</a>()</pre>
 <div class="block">Returns the header followed by the uncompressed data, even if using
  compression. This is needed for storing uncompressed blocks in the block
  cache. Can be called in the "writing" state or the "block ready" state.
@@ -861,7 +861,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnDiskBufferWithHeader</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1186">getOnDiskBufferWithHeader</a>()</pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1226">getOnDiskBufferWithHeader</a>()</pre>
 <div class="block">Returns the header followed by the on-disk (compressed/encoded/encrypted) data. This is
  needed for storing packed blocks in the block cache. Expects calling semantics identical to
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#getUncompressedBufferWithHeader()"><code>getUncompressedBufferWithHeader()</code></a>. Returns only the header and data,
@@ -875,7 +875,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>expectState</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1191">expectState</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>&nbsp;expectedState)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1231">expectState</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>&nbsp;expectedState)</pre>
 </li>
 </ul>
 <a name="writeBlock(org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable, org.apache.hadoop.fs.FSDataOutputStream)">
@@ -884,9 +884,9 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>writeBlock</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1208">writeBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockWritable</a>&nbsp;bw,
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1248">writeBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockWritable</a>&nbsp;bw,
               org.apache.hadoop.fs.FSDataOutputStream&nbsp;out)
-                throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+          throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Takes the given <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock.BlockWritable</code></a> instance, creates a new block of
  its appropriate type, writes the writable into this block, and flushes
  the block into the output stream. The writer is instructed not to buffer
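
A hedged sketch of a BlockWritable handed to this method. The interface's two methods, getBlockType() and writeToBlock(DataOutput), are recalled from the HFileBlock.BlockWritable source rather than documented on this page, so treat the exact signatures as assumptions; writer and fsOut are placeholders supplied by the caller:

  HFileBlock.BlockWritable dummy = new HFileBlock.BlockWritable() {
    @Override
    public BlockType getBlockType() {
      return BlockType.META;                       // block type the writer should emit
    }
    @Override
    public void writeToBlock(DataOutput out) throws IOException {
      out.writeInt(42);                            // placeholder payload for illustration
    }
  };
  writer.writeBlock(dummy, fsOut);                 // creates, fills and flushes the block in one call
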
@@ -902,7 +902,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getBlockForCaching</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1221">getBlockForCaching</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.1261">getBlockForCaching</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
 <div class="block">Creates a new HFileBlock. Checksums have already been validated, so
  the byte buffer passed into the constructor of this newly created
  block does not have checksum data even though the header minor


[03/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
index 9a60dce..fcaf416 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
@@ -34,1938 +34,1994 @@
 <span class="sourceLineNo">026</span>import java.util.concurrent.locks.Lock;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.27"></a>
 <span class="sourceLineNo">028</span><a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.Path;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.Cell;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.HConstants;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.io.ByteArrayOutputStream;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.io.ByteBuffInputStream;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.io.ByteBufferSupportDataOutputStream;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.nio.MultiByteBuff;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.nio.SingleByteBuff;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.ChecksumType;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.io.IOUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>import com.google.common.annotations.VisibleForTesting;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import com.google.common.base.Preconditions;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>/**<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * Reading {@link HFile} version 1 and 2 blocks, and writing version 2 blocks.<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * &lt;ul&gt;<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * &lt;li&gt;In version 1 all blocks are always compressed or uncompressed, as<a name="line.59"></a>
-<span class="sourceLineNo">060</span> * specified by the {@link HFile}'s compression algorithm, with a type-specific<a name="line.60"></a>
-<span class="sourceLineNo">061</span> * magic record stored in the beginning of the compressed data (i.e. one needs<a name="line.61"></a>
-<span class="sourceLineNo">062</span> * to uncompress the compressed block to determine the block type). There is<a name="line.62"></a>
-<span class="sourceLineNo">063</span> * only a single compression algorithm setting for all blocks. Offset and size<a name="line.63"></a>
-<span class="sourceLineNo">064</span> * information from the block index are required to read a block.<a name="line.64"></a>
-<span class="sourceLineNo">065</span> * &lt;li&gt;In version 2 a block is structured as follows:<a name="line.65"></a>
-<span class="sourceLineNo">066</span> * &lt;ul&gt;<a name="line.66"></a>
-<span class="sourceLineNo">067</span> * &lt;li&gt;header (see Writer#finishBlock())<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * &lt;ul&gt;<a name="line.68"></a>
-<span class="sourceLineNo">069</span> * &lt;li&gt;Magic record identifying the block type (8 bytes)<a name="line.69"></a>
-<span class="sourceLineNo">070</span> * &lt;li&gt;Compressed block size, excluding header, including checksum (4 bytes)<a name="line.70"></a>
-<span class="sourceLineNo">071</span> * &lt;li&gt;Uncompressed block size, excluding header, excluding checksum (4 bytes)<a name="line.71"></a>
-<span class="sourceLineNo">072</span> * &lt;li&gt;The offset of the previous block of the same type (8 bytes). This is<a name="line.72"></a>
-<span class="sourceLineNo">073</span> * used to be able to navigate to the previous block without going to the block<a name="line.73"></a>
-<span class="sourceLineNo">074</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the ordinal describing checksum type (1 byte)<a name="line.74"></a>
-<span class="sourceLineNo">075</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the number of data bytes/checksum chunk (4 bytes)<a name="line.75"></a>
-<span class="sourceLineNo">076</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the size of data on disk, including header,<a name="line.76"></a>
-<span class="sourceLineNo">077</span> * excluding checksums (4 bytes)<a name="line.77"></a>
-<span class="sourceLineNo">078</span> * &lt;/ul&gt;<a name="line.78"></a>
-<span class="sourceLineNo">079</span> * &lt;/li&gt;<a name="line.79"></a>
-<span class="sourceLineNo">080</span> * &lt;li&gt;Raw/Compressed/Encrypted/Encoded data. The compression algorithm is the<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * same for all the blocks in the {@link HFile}, similarly to what was done in<a name="line.81"></a>
-<span class="sourceLineNo">082</span> * version 1.<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * &lt;li&gt;For minorVersions &amp;gt;=1, a series of 4 byte checksums, one each for<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * the number of bytes specified by bytesPerChecksum.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * &lt;/ul&gt;<a name="line.85"></a>
-<span class="sourceLineNo">086</span> * &lt;/ul&gt;<a name="line.86"></a>
-<span class="sourceLineNo">087</span> */<a name="line.87"></a>
-<span class="sourceLineNo">088</span>@InterfaceAudience.Private<a name="line.88"></a>
-<span class="sourceLineNo">089</span>public class HFileBlock implements Cacheable {<a name="line.89"></a>
-<span class="sourceLineNo">090</span><a name="line.90"></a>
-<span class="sourceLineNo">091</span>  /**<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * On a checksum failure on a Reader, these many suceeding read<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * requests switch back to using hdfs checksums before auto-reenabling<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   * hbase checksum verification.<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static final boolean FILL_HEADER = true;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final boolean DONT_FILL_HEADER = false;<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  /**<a name="line.101"></a>
-<span class="sourceLineNo">102</span>   * The size of block header when blockType is {@link BlockType#ENCODED_DATA}.<a name="line.102"></a>
-<span class="sourceLineNo">103</span>   * This extends normal header by adding the id of encoder.<a name="line.103"></a>
-<span class="sourceLineNo">104</span>   */<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  public static final int ENCODED_HEADER_SIZE = HConstants.HFILEBLOCK_HEADER_SIZE<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      + DataBlockEncoding.ID_SIZE;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  static final byte[] DUMMY_HEADER_NO_CHECKSUM =<a name="line.108"></a>
-<span class="sourceLineNo">109</span>     new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  // How to get the estimate correctly? if it is a singleBB?<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  public static final int MULTI_BYTE_BUFFER_HEAP_SIZE =<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      (int)ClassSize.estimateBase(MultiByteBuff.class, false);<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>  // meta.usesHBaseChecksum+offset+nextBlockOnDiskSizeWithHeader<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public static final int EXTRA_SERIALIZATION_SPACE = Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      + Bytes.SIZEOF_LONG;<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  /**<a name="line.119"></a>
-<span class="sourceLineNo">120</span>   * Each checksum value is an integer that can be stored in 4 bytes.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  static final CacheableDeserializer&lt;Cacheable&gt; blockDeserializer =<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.126"></a>
-<span class="sourceLineNo">127</span>            throws IOException {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();<a name="line.128"></a>
-<span class="sourceLineNo">129</span>          ByteBuff newByteBuffer;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>          if (reuse) {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>            newByteBuffer = buf.slice();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>          } else {<a name="line.132"></a>
-<span class="sourceLineNo">133</span>            // Used only in tests<a name="line.133"></a>
-<span class="sourceLineNo">134</span>            int len = buf.limit();<a name="line.134"></a>
-<span class="sourceLineNo">135</span>            newByteBuffer = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.135"></a>
-<span class="sourceLineNo">136</span>            newByteBuffer.put(0, buf, buf.position(), len);<a name="line.136"></a>
-<span class="sourceLineNo">137</span>          }<a name="line.137"></a>
-<span class="sourceLineNo">138</span>          buf.position(buf.limit());<a name="line.138"></a>
-<span class="sourceLineNo">139</span>          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>          boolean usesChecksum = buf.get() == (byte)1;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>          HFileBlock hFileBlock = new HFileBlock(newByteBuffer, usesChecksum, memType);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>          hFileBlock.offset = buf.getLong();<a name="line.142"></a>
-<span class="sourceLineNo">143</span>          hFileBlock.nextBlockOnDiskSizeWithHeader = buf.getInt();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>          if (hFileBlock.hasNextBlockHeader()) {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>            hFileBlock.buf.limit(hFileBlock.buf.limit() - hFileBlock.headerSize());<a name="line.145"></a>
-<span class="sourceLineNo">146</span>          }<a name="line.146"></a>
-<span class="sourceLineNo">147</span>          return hFileBlock;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>        @Override<a name="line.150"></a>
-<span class="sourceLineNo">151</span>        public int getDeserialiserIdentifier() {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>          return deserializerIdentifier;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>        @Override<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>          // Used only in tests<a name="line.157"></a>
-<span class="sourceLineNo">158</span>          return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        }<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      };<a name="line.160"></a>
-<span class="sourceLineNo">161</span>  private static final int deserializerIdentifier;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  static {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    deserializerIdentifier = CacheableDeserializerIdManager<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        .registerDeserializer(blockDeserializer);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>  }<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  /** Type of block. Header field 0. */<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private BlockType blockType;<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>  /** Size on disk excluding header, including checksum. Header field 1. */<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  private int onDiskSizeWithoutHeader;<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /** Size of pure data. Does not include header or checksums. Header field 2. */<a name="line.173"></a>
-<span class="sourceLineNo">174</span>  private final int uncompressedSizeWithoutHeader;<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  /** The offset of the previous block on disk. Header field 3. */<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private final long prevBlockOffset;<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  /**<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   * Size on disk of header + data. Excludes checksum. Header field 6,<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * OR calculated from {@link #onDiskSizeWithoutHeader} when using HDFS checksum.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   */<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final int onDiskDataSizeWithHeader;<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /** The in-memory representation of the hfile block */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private ByteBuff buf;<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span>  /** Meta data that holds meta information on the hfileblock */<a name="line.188"></a>
-<span class="sourceLineNo">189</span>  private HFileContext fileContext;<a name="line.189"></a>
+<span class="sourceLineNo">029</span>import org.apache.commons.logging.Log;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.commons.logging.LogFactory;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.fs.Path;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.Cell;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.HConstants;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.io.ByteArrayOutputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.io.ByteBuffInputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.ByteBufferSupportDataOutputStream;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.nio.MultiByteBuff;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.nio.SingleByteBuff;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.ChecksumType;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.io.IOUtils;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>import com.google.common.annotations.VisibleForTesting;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import com.google.common.base.Preconditions;<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>/**<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * Reads {@link HFile} version 1 and version 2 blocks but writes version 2 blocks only.<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * Version 2 was introduced in hbase-0.92.0. Does read and write out to the filesystem but also<a name="line.60"></a>
+<span class="sourceLineNo">061</span> * the read and write to Cache.<a name="line.61"></a>
+<span class="sourceLineNo">062</span> *<a name="line.62"></a>
+<span class="sourceLineNo">063</span> * &lt;h3&gt;HFileBlock: Version 1&lt;/h3&gt;<a name="line.63"></a>
+<span class="sourceLineNo">064</span> * As of this writing, there should be no more version 1 blocks found out in the wild. Version 2<a name="line.64"></a>
+<span class="sourceLineNo">065</span> * as introduced in hbase-0.92.0.<a name="line.65"></a>
+<span class="sourceLineNo">066</span> * In version 1 all blocks are always compressed or uncompressed, as<a name="line.66"></a>
+<span class="sourceLineNo">067</span> * specified by the {@link HFile}'s compression algorithm, with a type-specific<a name="line.67"></a>
+<span class="sourceLineNo">068</span> * magic record stored in the beginning of the compressed data (i.e. one needs<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * to uncompress the compressed block to determine the block type). There is<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * only a single compression algorithm setting for all blocks. Offset and size<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * information from the block index are required to read a block.<a name="line.71"></a>
+<span class="sourceLineNo">072</span> * &lt;h3&gt;HFileBlock: Version 2&lt;/h3&gt;<a name="line.72"></a>
+<span class="sourceLineNo">073</span> * In version 2, a block is structured as follows:<a name="line.73"></a>
+<span class="sourceLineNo">074</span> * &lt;ul&gt;<a name="line.74"></a>
+<span class="sourceLineNo">075</span> * &lt;li&gt;&lt;b&gt;Header:&lt;/b&gt; See Writer#putHeader(); header total size is HFILEBLOCK_HEADER_SIZE)<a name="line.75"></a>
+<span class="sourceLineNo">076</span> * &lt;ul&gt;<a name="line.76"></a>
+<span class="sourceLineNo">077</span> * &lt;li&gt;Magic record identifying the {@link BlockType} (8 bytes): e.g. &lt;code&gt;DATABLK*&lt;/code&gt;<a name="line.77"></a>
+<span class="sourceLineNo">078</span> * &lt;li&gt;Compressed -- a.k.a 'on disk' -- block size, excluding header, but including<a name="line.78"></a>
+<span class="sourceLineNo">079</span> *     tailing checksum bytes (4 bytes)<a name="line.79"></a>
+<span class="sourceLineNo">080</span> * &lt;li&gt;Uncompressed block size, excluding header, and excluding checksum bytes (4 bytes)<a name="line.80"></a>
+<span class="sourceLineNo">081</span> * &lt;li&gt;The offset of the previous block of the same type (8 bytes). This is<a name="line.81"></a>
+<span class="sourceLineNo">082</span> * used to navigate to the previous block without having to go to the block index<a name="line.82"></a>
+<span class="sourceLineNo">083</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the ordinal describing checksum type (1 byte)<a name="line.83"></a>
+<span class="sourceLineNo">084</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the number of data bytes/checksum chunk (4 bytes)<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the size of data 'on disk', including header,<a name="line.85"></a>
+<span class="sourceLineNo">086</span> * excluding checksums (4 bytes)<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * &lt;/ul&gt;<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * &lt;/li&gt;<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * &lt;li&gt;&lt;b&gt;Raw/Compressed/Encrypted/Encoded data:&lt;/b&gt; The compression algorithm is the<a name="line.89"></a>
+<span class="sourceLineNo">090</span> * same for all the blocks in the {@link HFile}, similarly to what was done in<a name="line.90"></a>
+<span class="sourceLineNo">091</span> * version 1. If compression is NONE, this is just raw, serialized Cells.<a name="line.91"></a>
+<span class="sourceLineNo">092</span> * &lt;li&gt;&lt;b&gt;Tail:&lt;/b&gt; For minorVersions &amp;gt;=1, a series of 4 byte checksums, one each for<a name="line.92"></a>
+<span class="sourceLineNo">093</span> * the number of bytes specified by bytesPerChecksum.<a name="line.93"></a>
+<span class="sourceLineNo">094</span> * &lt;/ul&gt;<a name="line.94"></a>
+<span class="sourceLineNo">095</span> * &lt;p&gt;Be aware that when we read from HDFS, we overread pulling in the next blocks' header too.<a name="line.95"></a>
+<span class="sourceLineNo">096</span> * We do this to save having to do two seeks to read an HFileBlock; a seek to read the header<a name="line.96"></a>
+<span class="sourceLineNo">097</span> * to figure lengths, etc., and then another seek to pull in the data.<a name="line.97"></a>
+<span class="sourceLineNo">098</span> */<a name="line.98"></a>
+<span class="sourceLineNo">099</span>@InterfaceAudience.Private<a name="line.99"></a>
+<span class="sourceLineNo">100</span>public class HFileBlock implements Cacheable {<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final Log LOG = LogFactory.getLog(HFileBlock.class);<a name="line.101"></a>
+<span class="sourceLineNo">102</span><a name="line.102"></a>
+<span class="sourceLineNo">103</span>  /**<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   * On a checksum failure, do these many succeeding read requests using hdfs checksums before<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * auto-reenabling hbase checksum verification.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   */<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;<a name="line.107"></a>
+<span class="sourceLineNo">108</span><a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static int UNSET = -1;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  public static final boolean FILL_HEADER = true;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  public static final boolean DONT_FILL_HEADER = false;<a name="line.111"></a>
+<span class="sourceLineNo">112</span><a name="line.112"></a>
+<span class="sourceLineNo">113</span>  // How to get the estimate correctly? if it is a singleBB?<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  public static final int MULTI_BYTE_BUFFER_HEAP_SIZE =<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      (int)ClassSize.estimateBase(MultiByteBuff.class, false);<a name="line.115"></a>
+<span class="sourceLineNo">116</span><a name="line.116"></a>
+<span class="sourceLineNo">117</span>  /**<a name="line.117"></a>
+<span class="sourceLineNo">118</span>   * See #blockDeserializer method for more info.<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * 13 bytes of extra stuff stuck on the end of the HFileBlock that we pull in from HDFS (note,<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * when we read from HDFS, we pull in an HFileBlock AND the header of the next block if one).<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   * The 13 bytes are: usesHBaseChecksum (1 byte) + offset of this block (long) +<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * nextBlockOnDiskSizeWithHeader (int).<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   */<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  public static final int EXTRA_SERIALIZATION_SPACE =<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG;<a name="line.125"></a>
+<span class="sourceLineNo">126</span><a name="line.126"></a>
+<span class="sourceLineNo">127</span>  /**<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   * Each checksum value is an integer that can be stored in 4 bytes.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   */<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  static final byte[] DUMMY_HEADER_NO_CHECKSUM =<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  /**<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * Used deserializing blocks from Cache.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   *<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * Serializing to cache is a little hard to follow. See Writer#finishBlock for where it is done.<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   * When we start to append to a new HFileBlock,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>   * we skip over where the header should go before we start adding Cells. When the block is<a name="line.140"></a>
+<span class="sourceLineNo">141</span>   * done, we'll then go back and fill in the header and the checksum tail. Be aware that what<a name="line.141"></a>
+<span class="sourceLineNo">142</span>   * gets serialized into the blockcache is a byte array that contains an HFileBlock followed by<a name="line.142"></a>
+<span class="sourceLineNo">143</span>   * its checksums and then the header of the next HFileBlock (needed to help navigate), followed<a name="line.143"></a>
+<span class="sourceLineNo">144</span>   * again by an extra 13 bytes of meta info needed when time to recreate the HFileBlock from cache.<a name="line.144"></a>
+<span class="sourceLineNo">145</span>   *<a name="line.145"></a>
+<span class="sourceLineNo">146</span>   * ++++++++++++++<a name="line.146"></a>
+<span class="sourceLineNo">147</span>   * + HFileBlock +<a name="line.147"></a>
+<span class="sourceLineNo">148</span>   * ++++++++++++++<a name="line.148"></a>
+<span class="sourceLineNo">149</span>   * + Checksums  +<a name="line.149"></a>
+<span class="sourceLineNo">150</span>   * ++++++++++++++<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   * + NextHeader +<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   * ++++++++++++++<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * + ExtraMeta! +<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   * ++++++++++++++<a name="line.154"></a>
+<span class="sourceLineNo">155</span>   *<a name="line.155"></a>
+<span class="sourceLineNo">156</span>   * TODO: Fix it so we do NOT put the NextHeader into blockcache. It is not necessary.<a name="line.156"></a>
+<span class="sourceLineNo">157</span>   */<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  static final CacheableDeserializer&lt;Cacheable&gt; blockDeserializer =<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.159"></a>
+<span class="sourceLineNo">160</span>        public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.160"></a>
+<span class="sourceLineNo">161</span>        throws IOException {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>          // Rewind to just before the EXTRA_SERIALIZATION_SPACE.<a name="line.162"></a>
+<span class="sourceLineNo">163</span>          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>          // Get a new buffer to pass the deserialized HFileBlock for it to 'own'.<a name="line.164"></a>
+<span class="sourceLineNo">165</span>          ByteBuff newByteBuffer;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          if (reuse) {<a name="line.166"></a>
+<span class="sourceLineNo">167</span>            newByteBuffer = buf.slice();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          } else {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>            int len = buf.limit();<a name="line.169"></a>
+<span class="sourceLineNo">170</span>            newByteBuffer = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.170"></a>
+<span class="sourceLineNo">171</span>            newByteBuffer.put(0, buf, buf.position(), len);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>          }<a name="line.172"></a>
+<span class="sourceLineNo">173</span>          // Read out the EXTRA_SERIALIZATION_SPACE content and shove into our HFileBlock.<a name="line.173"></a>
+<span class="sourceLineNo">174</span>          buf.position(buf.limit());<a name="line.174"></a>
+<span class="sourceLineNo">175</span>          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);<a name="line.175"></a>
+<span class="sourceLineNo">176</span>          boolean usesChecksum = buf.get() == (byte)1;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>          HFileBlock hFileBlock = new HFileBlock(newByteBuffer, usesChecksum, memType);<a name="line.177"></a>
+<span class="sourceLineNo">178</span>          hFileBlock.offset = buf.getLong();<a name="line.178"></a>
+<span class="sourceLineNo">179</span>          hFileBlock.nextBlockOnDiskSizeWithHeader = buf.getInt();<a name="line.179"></a>
+<span class="sourceLineNo">180</span>          if (hFileBlock.hasNextBlockHeader()) {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>            hFileBlock.buf.limit(hFileBlock.buf.limit() - hFileBlock.headerSize());<a name="line.181"></a>
+<span class="sourceLineNo">182</span>          }<a name="line.182"></a>
+<span class="sourceLineNo">183</span>          return hFileBlock;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>        @Override<a name="line.186"></a>
+<span class="sourceLineNo">187</span>        public int getDeserialiserIdentifier() {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>          return deserializerIdentifier;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>        }<a name="line.189"></a>
 <span class="sourceLineNo">190</span><a name="line.190"></a>
-<span class="sourceLineNo">191</span>  /**<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * The offset of this block in the file. Populated by the reader for<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * convenience of access. This offset is not part of the block header.<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  private long offset = -1;<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * The on-disk size of the next block, including the header, obtained by<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * peeking into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the next block's<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * header, or -1 if unknown.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  private int nextBlockOnDiskSizeWithHeader = -1;<a name="line.202"></a>
+<span class="sourceLineNo">191</span>        @Override<a name="line.191"></a>
+<span class="sourceLineNo">192</span>        public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>          // Used only in tests<a name="line.193"></a>
+<span class="sourceLineNo">194</span>          return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>        }<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      };<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>  private static final int deserializerIdentifier;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  static {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    deserializerIdentifier = CacheableDeserializerIdManager<a name="line.200"></a>
+<span class="sourceLineNo">201</span>        .registerDeserializer(blockDeserializer);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
 <span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private MemoryType memType = MemoryType.EXCLUSIVE;<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * is mostly used when the block data has already been read and uncompressed,<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   * and is sitting in a byte buffer.<a name="line.209"></a>
-<span class="sourceLineNo">210</span>   *<a name="line.210"></a>
-<span class="sourceLineNo">211</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.211"></a>
-<span class="sourceLineNo">212</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.212"></a>
-<span class="sourceLineNo">213</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.213"></a>
-<span class="sourceLineNo">214</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.214"></a>
-<span class="sourceLineNo">215</span>   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes) followed by<a name="line.215"></a>
-<span class="sourceLineNo">216</span>   *          uncompressed data. This<a name="line.216"></a>
-<span class="sourceLineNo">217</span>   * @param fillHeader when true, parse {@code buf} and override the first 4 header fields.<a name="line.217"></a>
-<span class="sourceLineNo">218</span>   * @param offset the file offset the block was read from<a name="line.218"></a>
-<span class="sourceLineNo">219</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.219"></a>
-<span class="sourceLineNo">220</span>   * @param fileContext HFile meta data<a name="line.220"></a>
-<span class="sourceLineNo">221</span>   */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      long prevBlockOffset, ByteBuff buf, boolean fillHeader, long offset,<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    this.blockType = blockType;<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    this.buf = buf;<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    this.offset = offset;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    this.fileContext = fileContext;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    if (fillHeader)<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      overwriteHeader();<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    this.buf.rewind();<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  }<a name="line.236"></a>
+<span class="sourceLineNo">204</span>  /** Type of block. Header field 0. */<a name="line.204"></a>
+<span class="sourceLineNo">205</span>  private BlockType blockType;<a name="line.205"></a>
+<span class="sourceLineNo">206</span><a name="line.206"></a>
+<span class="sourceLineNo">207</span>  /**<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   * Size on disk excluding header, including checksum. Header field 1.<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   */<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  private int onDiskSizeWithoutHeader;<a name="line.211"></a>
+<span class="sourceLineNo">212</span><a name="line.212"></a>
+<span class="sourceLineNo">213</span>  /**<a name="line.213"></a>
+<span class="sourceLineNo">214</span>   * Size of pure data. Does not include header or checksums. Header field 2.<a name="line.214"></a>
+<span class="sourceLineNo">215</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.215"></a>
+<span class="sourceLineNo">216</span>   */<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  private final int uncompressedSizeWithoutHeader;<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>  /**<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   * The offset of the previous block on disk. Header field 3.<a name="line.220"></a>
+<span class="sourceLineNo">221</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   */<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  private final long prevBlockOffset;<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>  /**<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   * Size on disk of header + data. Excludes checksum. Header field 6,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>   * OR calculated from {@link #onDiskSizeWithoutHeader} when using HDFS checksum.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private final int onDiskDataSizeWithHeader;<a name="line.230"></a>
+<span class="sourceLineNo">231</span><a name="line.231"></a>
+<span class="sourceLineNo">232</span>  /** The in-memory representation of the hfile block */<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  private ByteBuff buf;<a name="line.233"></a>
+<span class="sourceLineNo">234</span><a name="line.234"></a>
+<span class="sourceLineNo">235</span>  /** Meta data that holds meta information on the hfileblock */<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private HFileContext fileContext;<a name="line.236"></a>
 <span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    this(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, prevBlockOffset,<a name="line.241"></a>
-<span class="sourceLineNo">242</span>        new SingleByteBuff(buf), fillHeader, offset, onDiskDataSizeWithHeader, fileContext);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  }<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /**<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.246"></a>
-<span class="sourceLineNo">247</span>   */<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  HFileBlock(HFileBlock that) {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    this.blockType = that.blockType;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    this.prevBlockOffset = that.prevBlockOffset;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    this.buf = that.buf.duplicate();<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this.offset = that.offset;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    this.fileContext = that.fileContext;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
-<span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    this(new SingleByteBuff(b), usesHBaseChecksum);<a name="line.261"></a>
-<span class="sourceLineNo">262</span>  }<a name="line.262"></a>
-<span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>  /**<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * to that point.<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   */<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum) throws IOException {<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this(b, usesHBaseChecksum, MemoryType.EXCLUSIVE);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  }<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>  /**<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * to that point.<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum, MemoryType memType) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    b.rewind();<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    blockType = BlockType.read(b);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    onDiskSizeWithoutHeader = b.getInt();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    uncompressedSizeWithoutHeader = b.getInt();<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    prevBlockOffset = b.getLong();<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    HFileContextBuilder contextBuilder = new HFileContextBuilder();<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    if (usesHBaseChecksum) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      contextBuilder.withBytesPerCheckSum(b.getInt());<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      this.onDiskDataSizeWithHeader = b.getInt();<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    } else {<a name="line.292"></a>
-<span class="sourceLineNo">293</span>      contextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      contextBuilder.withBytesPerCheckSum(0);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      this.onDiskDataSizeWithHeader = onDiskSizeWithoutHeader +<a name="line.295"></a>
-<span class="sourceLineNo">296</span>                                       HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    this.fileContext = contextBuilder.build();<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    this.memType = memType;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    buf = b;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    buf.rewind();<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
-<span class="sourceLineNo">303</span><a name="line.303"></a>
-<span class="sourceLineNo">304</span>  public BlockType getBlockType() {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    return blockType;<a name="line.305"></a>
+<span class="sourceLineNo">238</span>  /**<a name="line.238"></a>
+<span class="sourceLineNo">239</span>   * The offset of this block in the file. Populated by the reader for<a name="line.239"></a>
+<span class="sourceLineNo">240</span>   * convenience of access. This offset is not part of the block header.<a name="line.240"></a>
+<span class="sourceLineNo">241</span>   */<a name="line.241"></a>
+<span class="sourceLineNo">242</span>  private long offset = UNSET;<a name="line.242"></a>
+<span class="sourceLineNo">243</span><a name="line.243"></a>
+<span class="sourceLineNo">244</span>  /**<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * The on-disk size of the next block, including the header, obtained by<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   * peeking into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the next block's<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   * header, or -1 if unknown.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>   */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  private int nextBlockOnDiskSizeWithHeader = UNSET;<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private MemoryType memType = MemoryType.EXCLUSIVE;<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>  /**<a name="line.253"></a>
+<span class="sourceLineNo">254</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.254"></a>
+<span class="sourceLineNo">255</span>   * is used when the block data has already been read and uncompressed,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>   * and is sitting in a byte buffer.<a name="line.256"></a>
+<span class="sourceLineNo">257</span>   *<a name="line.257"></a>
+<span class="sourceLineNo">258</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.258"></a>
+<span class="sourceLineNo">259</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.259"></a>
+<span class="sourceLineNo">260</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.260"></a>
+<span class="sourceLineNo">261</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.261"></a>
+<span class="sourceLineNo">262</span>   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes) followed by<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   *          uncompressed data.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * @param offset the file offset the block was read from<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.266"></a>
+<span class="sourceLineNo">267</span>   * @param fileContext HFile meta data<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   */<a name="line.268"></a>
+<span class="sourceLineNo">269</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      long prevBlockOffset, ByteBuff buf, boolean fillHeader, long offset,<a name="line.270"></a>
+<span class="sourceLineNo">271</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.blockType = blockType;<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    this.buf = buf;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    this.offset = offset;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    this.fileContext = fileContext;<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    if (fillHeader) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      overwriteHeader();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.buf.rewind();<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    this(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, prevBlockOffset,<a name="line.289"></a>
+<span class="sourceLineNo">290</span>        new SingleByteBuff(buf), fillHeader, offset, onDiskDataSizeWithHeader, fileContext);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>  /**<a name="line.293"></a>
+<span class="sourceLineNo">294</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.294"></a>
+<span class="sourceLineNo">295</span>   */<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  HFileBlock(HFileBlock that) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    this.blockType = that.blockType;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;<a name="line.298"></a>
+<span class="sourceLineNo">299</span>    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    this.prevBlockOffset = that.prevBlockOffset;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    this.buf = that.buf.duplicate();<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    this.offset = that.offset;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.fileContext = that.fileContext;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;<a name="line.305"></a>
 <span class="sourceLineNo">306</span>  }<a name="line.306"></a>
 <span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  public short getDataBlockEncodingId() {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.311"></a>
-<span class="sourceLineNo">312</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    return buf.getShort(headerSize());<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  }<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>  /**<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @return the on-disk size of header + data part + checksum.<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   */<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  public int getOnDiskSizeWithHeader() {<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   */<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  public int getOnDiskSizeWithoutHeader() {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    return onDiskSizeWithoutHeader;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  }<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  /**<a name="line.331"></a>
-<span class="sourceLineNo">332</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   */<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   public int getUncompressedSizeWithoutHeader() {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    return uncompressedSizeWithoutHeader;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>  }<a name="line.336"></a>
-<span class="sourceLineNo">337</span><a name="line.337"></a>
-<span class="sourceLineNo">338</span>  /**<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   *         -1 if unknown<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public long getPrevBlockOffset() {<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    return prevBlockOffset;<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   * is modified as side-effect.<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   */<a name="line.349"></a>
-<span class="sourceLineNo">350</span>  private void overwriteHeader() {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    buf.rewind();<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    blockType.write(buf);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    buf.putLong(prevBlockOffset);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span>  }<a name="line.361"></a>
-<span class="sourceLineNo">362</span><a name="line.362"></a>
-<span class="sourceLineNo">363</span>  /**<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * Returns a buffer that does not include the header or checksum.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   *<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.366"></a>
+<span class="sourceLineNo">308</span>  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    this(new SingleByteBuff(b), usesHBaseChecksum);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   * to that point.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum) throws IOException {<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    this(b, usesHBaseChecksum, MemoryType.EXCLUSIVE);<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  }<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * to that point.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   */<a name="line.327"></a>
+<span class="sourceLineNo">328</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum, MemoryType memType) throws IOException {<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    b.rewind();<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    blockType = BlockType.read(b);<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    onDiskSizeWithoutHeader = b.getInt();<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    uncompressedSizeWithoutHeader = b.getInt();<a name="line.332"></a>
+<span class="sourceLineNo">333</span>    prevBlockOffset = b.getLong();<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    HFileContextBuilder contextBuilder = new HFileContextBuilder();<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    if (usesHBaseChecksum) {<a name="line.336"></a>
+<span class="sourceLineNo">337</span>      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));<a name="line.337"></a>
+<span class="sourceLineNo">338</span>      contextBuilder.withBytesPerCheckSum(b.getInt());<a name="line.338"></a>
+<span class="sourceLineNo">339</span>      this.onDiskDataSizeWithHeader = b.getInt();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    } else {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      contextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.341"></a>
+<span class="sourceLineNo">342</span>      contextBuilder.withBytesPerCheckSum(0);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      this.onDiskDataSizeWithHeader =<a name="line.343"></a>
+<span class="sourceLineNo">344</span>          onDiskSizeWithoutHeader + HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    this.fileContext = contextBuilder.build();<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    this.memType = memType;<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    buf = b;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    buf.rewind();<a name="line.349"></a>
+<span class="sourceLineNo">350</span>  }<a name="line.350"></a>
+<span class="sourceLineNo">351</span><a name="line.351"></a>
+<span class="sourceLineNo">352</span>  public BlockType getBlockType() {<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    return blockType;<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  }<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  public short getDataBlockEncodingId() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.358"></a>
+<span class="sourceLineNo">359</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.359"></a>
+<span class="sourceLineNo">360</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    }<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return buf.getShort(headerSize());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the on-disk size of header + data part + checksum.<a name="line.366"></a>
 <span class="sourceLineNo">367</span>   */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ByteBuff dup = this.buf.duplicate();<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    dup.position(headerSize());<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    dup.limit(buf.limit() - totalChecksumBytes());<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    return dup.slice();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>  }<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>  /**<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   * Returns the buffer this block stores internally. The clients must not<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * modify the buffer object. This method has to be public because it is used<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   * filter lookup, but has to be used with caution. Checksum data is not<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * included in the returned buffer but header data is.<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   *<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * @return the buffer of this block for read-only operations<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   */<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  public ByteBuff getBufferReadOnly() {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    ByteBuff dup = this.buf.duplicate();<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    dup.limit(buf.limit() - totalChecksumBytes());<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    return dup.slice();<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>  /**<a name="line.390"></a>
-<span class="sourceLineNo">391</span>   * Returns the buffer of this block, including header data. The clients must<a name="line.391"></a>
-<span class="sourceLineNo">392</span>   * not modify the buffer object. This method has to be public because it is<a name="line.392"></a>
-<span class="sourceLineNo">393</span>   * used in {@link org.apache.hadoop.hbase.io.hfile.bucket.BucketCache} to avoid buffer copy.<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   *<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @return the buffer with header and checksum included for read-only operations<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   */<a name="line.396"></a>
-<span class="sourceLineNo">397</span>  public ByteBuff getBufferReadOnlyWithHeader() {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    ByteBuff dup = this.buf.duplicate();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    return dup.slice();<a name="line.399"></a>
-<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>  /**<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   * Returns a byte buffer of this block, including header data and checksum, positioned at<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * the beginning of header. The underlying data array is not copied.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   *<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @return the byte buffer with header and checksum included<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  ByteBuff getBufferWithHeader() {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    ByteBuff dupBuf = buf.duplicate();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    dupBuf.rewind();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    return dupBuf;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      String fieldName) throws IOException {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    if (valueFromBuf != valueFromField) {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.417"></a>
-<span class="sourceLineNo">418</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    }<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.422"></a>
-<span class="sourceLineNo">423</span>      throws IOException {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (valueFromBuf != valueFromField) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   * This function is primary for testing and debugging, and is not<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   */<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  void sanityCheck() throws IOException {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    buf.rewind();<a name="line.438"></a>
-<span class="sourceLineNo">439</span><a name="line.439"></a>
-<span class="sourceLineNo">440</span>    sanityCheckAssertion(BlockType.read(buf), blockType);<a name="line.440"></a>
-<span class="sourceLineNo">441</span><a name="line.441"></a>
-<span class="sourceLineNo">442</span>    sanityCheckAssertion(buf.getInt(), onDiskSizeWithoutHeader,<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        "onDiskSizeWithoutHeader");<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>    sanityCheckAssertion(buf.getInt(), uncompressedSizeWithoutHeader,<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        "uncompressedSizeWithoutHeader");<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>    sanityCheckAssertion(buf.getLong(), prevBlockOffset, "prevBlocKOffset");<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      sanityCheckAssertion(buf.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      sanityCheckAssertion(buf.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.451"></a>
-<span class="sourceLineNo">452</span>          "bytesPerChecksum");<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      sanityCheckAssertion(buf.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    }<a name="line.454"></a>
-<span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>    int cksumBytes = totalChecksumBytes();<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    if (buf.limit() != expectedBufLimit) {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      throw new AssertionError("Expected buffer limit " + expectedBufLimit<a name="line.459"></a>
-<span class="sourceLineNo">460</span>          + ", got " + buf.limit());<a name="line.460"></a>
-<span class="sourceLineNo">461</span>    }<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    int hdrSize = headerSize();<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    if (buf.capacity() != expectedBufLimit &amp;&amp;<a name="line.466"></a>
-<span class="sourceLineNo">467</span>        buf.capacity() != expectedBufLimit + hdrSize) {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>      throw new AssertionError("Invalid buffer capacity: " + buf.capacity() +<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    }<a name="line.470"></a>
-<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
-<span class="sourceLineNo">472</span><a name="line.472"></a>
-<span class="sourceLineNo">473</span>  @Override<a name="line.473"></a>
-<span class="sourceLineNo">474</span>  public String toString() {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    StringBuilder sb = new StringBuilder()<a name="line.475"></a>
-<span class="sourceLineNo">476</span>      .append("HFileBlock [")<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      .append(" fileOffset=").append(offset)<a name="line.477"></a>
-<span class="sourceLineNo">478</span>      .append(" headerSize()=").append(headerSize())<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      .append(" blockType=").append(blockType)<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      .append(" onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.480"></a>
-<span class="sourceLineNo">481</span>      .append(" uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      .append(" prevBlockOffset=").append(prevBlockOffset)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      .append(" isUseHBaseChecksum()=").append(fileContext.isUseHBaseChecksum());<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      sb.append(" checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        .append(" bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        .append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    } else {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      sb.append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.490"></a>
-<span class="sourceLineNo">491</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    }<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    String dataBegin = null;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    if (buf.hasArray()) {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    } else {<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    }<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    sb.append(" getOnDiskSizeWithHeader()=").append(getOnDiskSizeWithHeader())<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      .append(" totalChecksumBytes()=").append(totalChecksumBytes())<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      .append(" isUnpacked()=").append(isUnpacked())<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      .append(" buf=[ ").append(buf).append(" ]")<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      .append(" dataBeginsWith=").append(dataBegin)<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      .append(" fileContext=").append(fileContext)<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      .append(" ]");<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    return sb.toString();<a name="line.511"></a>
-<span class="sourceLineNo">512</span>  }<a name="line.512"></a>
-<span class="sourceLineNo">513</span><a name="line.513"></a>
-<span class="sourceLineNo">514</span>  /**<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * Called after reading a block with provided onDiskSizeWithHeader.<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   */<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  private void validateOnDiskSizeWithoutHeader(int expectedOnDiskSizeWithoutHeader)<a name="line.517"></a>
-<span class="sourceLineNo">518</span>  throws IOException {<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    if (onDiskSizeWithoutHeader != expectedOnDiskSizeWithoutHeader) {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      String dataBegin = null;<a name="line.520"></a>
-<span class="sourceLineNo">521</span>      if (buf.hasArray()) {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>        dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset(), Math.min(32, buf.limit()));<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      } else {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        ByteBuff bufDup = getBufferReadOnly();<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        byte[] dataBeginBytes = new byte[Math.min(32, bufDup.limit() - bufDup.position())];<a name="line.525"></a>
-<span class="sourceLineNo">526</span>        bufDup.get(dataBeginBytes);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String blockInfoMsg =<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        "Block offset: " + offset + ", data starts with: " + dataBegin;<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new IOException("On-disk size without header provided is "<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + expectedOnDiskSizeWithoutHeader + ", but block "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>          + "header contains " + onDiskSizeWithoutHeader + ". " +<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          blockInfoMsg);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  }<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>  /**<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      // encryption details.<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      return this;<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span><a name="line.549"></a>
-<span class="sourceLineNo">550</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.551"></a>
-<span class="sourceLineNo">552</span><a name="line.552"></a>
-<span class="sourceLineNo">553</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.553"></a>
-<span clas

<TRUNCATED>
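Note on the truncated hunk above: the replacement HFileBlock(ByteBuff, boolean, MemoryType) constructor reads the block header fields in a fixed on-disk order -- an 8-byte block-type magic, two 4-byte sizes, an 8-byte previous-block offset, and, only when HBase checksums are enabled, a checksum-type byte, a bytes-per-checksum int and an on-disk-data-size int. The standalone sketch below re-creates that layout for illustration only; the BlockHeaderSketch/BlockHeader names, the record shape and the demo values in main() are assumptions made for this example and are not HBase API.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// Illustrative only: a self-contained sketch of the header layout that the
// HFileBlock(ByteBuff, boolean, MemoryType) constructor in the hunk above
// appears to read. The class, record and field names here are hypothetical;
// they are not HBase types.
public final class BlockHeaderSketch {

  // Fields in on-disk order: 8-byte block-type magic, three fixed-size
  // fields, then three checksum-related fields that are only present when
  // HBase checksums are in use.
  record BlockHeader(String blockTypeMagic,
                     int onDiskSizeWithoutHeader,
                     int uncompressedSizeWithoutHeader,
                     long prevBlockOffset,
                     byte checksumType,
                     int bytesPerChecksum,
                     int onDiskDataSizeWithHeader) {}

  static BlockHeader parse(ByteBuffer b, boolean usesHBaseChecksum) {
    b.rewind();                      // same "ignore current position" behaviour as the constructor
    byte[] magic = new byte[8];
    b.get(magic);                    // block type magic, e.g. "DATABLK*"
    int onDiskSize = b.getInt();     // on-disk size, header excluded
    int uncompressedSize = b.getInt(); // uncompressed size, header and checksum excluded
    long prevOffset = b.getLong();   // offset of previous block of the same type, or -1
    byte checksumType = 0;
    int bytesPerChecksum = 0;
    int onDiskDataSizeWithHeader;
    if (usesHBaseChecksum) {
      checksumType = b.get();
      bytesPerChecksum = b.getInt();
      onDiskDataSizeWithHeader = b.getInt();
    } else {
      // No checksum fields in the header: derive the data size the same way
      // the constructor does, using the 24-byte checksum-free header size.
      onDiskDataSizeWithHeader = onDiskSize + 24;
    }
    return new BlockHeader(new String(magic, StandardCharsets.US_ASCII),
        onDiskSize, uncompressedSize, prevOffset,
        checksumType, bytesPerChecksum, onDiskDataSizeWithHeader);
  }

  public static void main(String[] args) {
    // Build a fake 33-byte header (checksum variant) purely to exercise parse();
    // all numeric values below are arbitrary demo values.
    ByteBuffer b = ByteBuffer.allocate(33);
    b.put("DATABLK*".getBytes(StandardCharsets.US_ASCII));
    b.putInt(4096).putInt(8192).putLong(-1L);
    b.put((byte) 1).putInt(512).putInt(4000);
    System.out.println(parse(b, true));
  }
}

The 24-byte figure for the checksum-free case corresponds to HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM used in the hunk (8 + 4 + 4 + 8); with the three checksum fields the header grows to 33 bytes, which is why the demo buffer in main() is allocated at that size.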

[15/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
index d8b6ca7..66dbcf3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.SeekerState.html
@@ -31,12 +31,12 @@
 <span class="sourceLineNo">023</span>import java.nio.ByteBuffer;<a name="line.23"></a>
 <span class="sourceLineNo">024</span><a name="line.24"></a>
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.ByteBufferedCell;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.Cell;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HConstants;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.31"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.Cell;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HConstants;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.31"></a>
 <span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.KeyValue.Type;<a name="line.32"></a>
 <span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.33"></a>
 <span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.34"></a>
@@ -60,1113 +60,1115 @@
 <span class="sourceLineNo">052</span> */<a name="line.52"></a>
 <span class="sourceLineNo">053</span>@InterfaceAudience.Private<a name="line.53"></a>
 <span class="sourceLineNo">054</span>abstract class BufferedDataBlockEncoder implements DataBlockEncoder {<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  private static int INITIAL_KEY_BUFFER_SIZE = 512;<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  @Override<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public ByteBuffer decodeKeyValues(DataInputStream source,<a name="line.59"></a>
-<span class="sourceLineNo">060</span>      HFileBlockDecodingContext blkDecodingCtx) throws IOException {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>      throw new IOException(this.getClass().getName() + " only accepts "<a name="line.62"></a>
-<span class="sourceLineNo">063</span>          + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    }<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>    HFileBlockDefaultDecodingContext decodingCtx =<a name="line.66"></a>
-<span class="sourceLineNo">067</span>        (HFileBlockDefaultDecodingContext) blkDecodingCtx;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    if (decodingCtx.getHFileContext().isIncludesTags()<a name="line.68"></a>
-<span class="sourceLineNo">069</span>        &amp;&amp; decodingCtx.getHFileContext().isCompressTags()) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      if (decodingCtx.getTagCompressionContext() != null) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        // It will be overhead to create the TagCompressionContext again and again for every block<a name="line.71"></a>
-<span class="sourceLineNo">072</span>        // decoding.<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        decodingCtx.getTagCompressionContext().clear();<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      } else {<a name="line.74"></a>
-<span class="sourceLineNo">075</span>        try {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>          TagCompressionContext tagCompressionContext = new TagCompressionContext(<a name="line.76"></a>
-<span class="sourceLineNo">077</span>              LRUDictionary.class, Byte.MAX_VALUE);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>          decodingCtx.setTagCompressionContext(tagCompressionContext);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>        } catch (Exception e) {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>          throw new IOException("Failed to initialize TagCompressionContext", e);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>        }<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      }<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    }<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    return internalDecodeKeyValues(source, 0, 0, decodingCtx);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  /********************* common prefixes *************************/<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  // Having this as static is fine but if META is having DBE then we should<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  // change this.<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  public static int compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    return Bytes.compareTo(left.getRowArray(), left.getRowOffset() + rowCommonPrefix,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>        left.getRowLength() - rowCommonPrefix, right.getRowArray(), right.getRowOffset()<a name="line.92"></a>
-<span class="sourceLineNo">093</span>            + rowCommonPrefix, right.getRowLength() - rowCommonPrefix);<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  public static int compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset() + familyCommonPrefix,<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        left.getFamilyLength() - familyCommonPrefix, right.getFamilyArray(),<a name="line.98"></a>
-<span class="sourceLineNo">099</span>        right.getFamilyOffset() + familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
-<span class="sourceLineNo">101</span><a name="line.101"></a>
-<span class="sourceLineNo">102</span>  public static int compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset() + qualCommonPrefix,<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        left.getQualifierLength() - qualCommonPrefix, right.getQualifierArray(),<a name="line.104"></a>
-<span class="sourceLineNo">105</span>        right.getQualifierOffset() + qualCommonPrefix, right.getQualifierLength()<a name="line.105"></a>
-<span class="sourceLineNo">106</span>            - qualCommonPrefix);<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  }<a name="line.107"></a>
-<span class="sourceLineNo">108</span><a name="line.108"></a>
-<span class="sourceLineNo">109</span>  protected static class SeekerState {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    protected ByteBuff currentBuffer;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    protected TagCompressionContext tagCompressionContext;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    protected int valueOffset = -1;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    protected int keyLength;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    protected int valueLength;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    protected int lastCommonPrefix;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    protected int tagsLength = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    protected int tagsOffset = -1;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    protected int tagsCompressedLength = 0;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    protected boolean uncompressTags = true;<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>    /** We need to store a copy of the key. */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    protected byte[] keyBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    protected byte[] tagsBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>    protected long memstoreTS;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    protected int nextKvOffset;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    protected KeyValue.KeyOnlyKeyValue currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    // many object creations.<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    private final ObjectIntPair&lt;ByteBuffer&gt; tmpPair;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    private final boolean includeTags;<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public SeekerState(ObjectIntPair&lt;ByteBuffer&gt; tmpPair, boolean includeTags) {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      this.tmpPair = tmpPair;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      this.includeTags = includeTags;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>    protected boolean isValid() {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      return valueOffset != -1;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    protected void invalidate() {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      valueOffset = -1;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>      tagsCompressedLength = 0;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      uncompressTags = true;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      currentBuffer = null;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    protected void ensureSpaceForKey() {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      if (keyLength &gt; keyBuffer.length) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        // rare case, but we need to handle arbitrary length of key<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        int newKeyBufferLength = Math.max(keyBuffer.length, 1) * 2;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>        while (keyLength &gt; newKeyBufferLength) {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>          newKeyBufferLength *= 2;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        }<a name="line.156"></a>
-<span class="sourceLineNo">157</span>        byte[] newKeyBuffer = new byte[newKeyBufferLength];<a name="line.157"></a>
-<span class="sourceLineNo">158</span>        System.arraycopy(keyBuffer, 0, newKeyBuffer, 0, keyBuffer.length);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        keyBuffer = newKeyBuffer;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      }<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    }<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>    protected void ensureSpaceForTags() {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      if (tagsLength &gt; tagsBuffer.length) {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        // rare case, but we need to handle arbitrary length of tags<a name="line.165"></a>
-<span class="sourceLineNo">166</span>        int newTagsBufferLength = Math.max(tagsBuffer.length, 1) * 2;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        while (tagsLength &gt; newTagsBufferLength) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>          newTagsBufferLength *= 2;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>        }<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        byte[] newTagsBuffer = new byte[newTagsBufferLength];<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        System.arraycopy(tagsBuffer, 0, newTagsBuffer, 0, tagsBuffer.length);<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        tagsBuffer = newTagsBuffer;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    }<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>    protected void setKey(byte[] keyBuffer, long memTS) {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      currentKey.setKey(keyBuffer, 0, keyLength);<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      memstoreTS = memTS;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    }<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>    /**<a name="line.181"></a>
-<span class="sourceLineNo">182</span>     * Copy the state from the next one into this instance (the previous state<a name="line.182"></a>
-<span class="sourceLineNo">183</span>     * placeholder). Used to save the previous state when we are advancing the<a name="line.183"></a>
-<span class="sourceLineNo">184</span>     * seeker to the next key/value.<a name="line.184"></a>
-<span class="sourceLineNo">185</span>     */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    protected void copyFromNext(SeekerState nextState) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      if (keyBuffer.length != nextState.keyBuffer.length) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>        keyBuffer = nextState.keyBuffer.clone();<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      } else if (!isValid()) {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        // Note: we can only call isValid before we override our state, so this<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        // comes before all the assignments at the end of this method.<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        System.arraycopy(nextState.keyBuffer, 0, keyBuffer, 0,<a name="line.192"></a>
-<span class="sourceLineNo">193</span>             nextState.keyLength);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      } else {<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        // don't copy the common prefix between this key and the previous one<a name="line.195"></a>
-<span class="sourceLineNo">196</span>        System.arraycopy(nextState.keyBuffer, nextState.lastCommonPrefix,<a name="line.196"></a>
-<span class="sourceLineNo">197</span>            keyBuffer, nextState.lastCommonPrefix, nextState.keyLength<a name="line.197"></a>
-<span class="sourceLineNo">198</span>                - nextState.lastCommonPrefix);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      }<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      currentKey = nextState.currentKey;<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>      valueOffset = nextState.valueOffset;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      keyLength = nextState.keyLength;<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      valueLength = nextState.valueLength;<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      lastCommonPrefix = nextState.lastCommonPrefix;<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      nextKvOffset = nextState.nextKvOffset;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      memstoreTS = nextState.memstoreTS;<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      currentBuffer = nextState.currentBuffer;<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      tagsOffset = nextState.tagsOffset;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      tagsLength = nextState.tagsLength;<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (nextState.tagCompressionContext != null) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        tagCompressionContext = nextState.tagCompressionContext;<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    public Cell toCell() {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>      // Buffer backing the value and tags part from the HFileBlock's buffer<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      // When tag compression in use, this will be only the value bytes area.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      ByteBuffer valAndTagsBuffer;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      int vOffset;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      int valAndTagsLength = this.valueLength;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>      int tagsLenSerializationSize = 0;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      if (this.includeTags &amp;&amp; this.tagCompressionContext == null) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        // Include the tags part also. This will be the tags bytes + 2 bytes of for storing tags<a name="line.224"></a>
-<span class="sourceLineNo">225</span>        // length<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        tagsLenSerializationSize = this.tagsOffset - (this.valueOffset + this.valueLength);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        valAndTagsLength += tagsLenSerializationSize + this.tagsLength;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      this.currentBuffer.asSubByteBuffer(this.valueOffset, valAndTagsLength, this.tmpPair);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      valAndTagsBuffer = this.tmpPair.getFirst();<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      vOffset = this.tmpPair.getSecond();// This is the offset to value part in the BB<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      if (valAndTagsBuffer.hasArray()) {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        return toOnheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      } else {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        return toOffheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>    private Cell toOnheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>        int tagsLenSerializationSize) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      byte[] tagsArray = HConstants.EMPTY_BYTE_ARRAY;<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      int tOffset = 0;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      if (this.includeTags) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        if (this.tagCompressionContext == null) {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          tagsArray = valAndTagsBuffer.array();<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          tOffset = valAndTagsBuffer.arrayOffset() + vOffset + this.valueLength<a name="line.246"></a>
-<span class="sourceLineNo">247</span>              + tagsLenSerializationSize;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        } else {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          tagsArray = Bytes.copy(tagsBuffer, 0, this.tagsLength);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          tOffset = 0;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      return new OnheapDecodedCell(Bytes.copy(keyBuffer, 0, this.keyLength),<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.255"></a>
-<span class="sourceLineNo">256</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer.array(),<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          valAndTagsBuffer.arrayOffset() + vOffset, this.valueLength, memstoreTS, tagsArray,<a name="line.257"></a>
-<span class="sourceLineNo">258</span>          tOffset, this.tagsLength);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>    private Cell toOffheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.261"></a>
-<span class="sourceLineNo">262</span>        int tagsLenSerializationSize) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      ByteBuffer tagsBuf =  HConstants.EMPTY_BYTE_BUFFER;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      int tOffset = 0;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      if (this.includeTags) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        if (this.tagCompressionContext == null) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          tagsBuf = valAndTagsBuffer;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>          tOffset = vOffset + this.valueLength + tagsLenSerializationSize;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        } else {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          tagsBuf = ByteBuffer.wrap(Bytes.copy(tagsBuffer, 0, this.tagsLength));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          tOffset = 0;<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return new OffheapDecodedCell(ByteBuffer.wrap(Bytes.copy(keyBuffer, 0, this.keyLength)),<a name="line.274"></a>
-<span class="sourceLineNo">275</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.276"></a>
-<span class="sourceLineNo">277</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer, vOffset,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>          this.valueLength, memstoreTS, tagsBuf, tOffset, this.tagsLength);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    }<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
-<span class="sourceLineNo">283</span>   * Copies only the key part of the keybuffer by doing a deep copy and passes the<a name="line.283"></a>
-<span class="sourceLineNo">284</span>   * seeker state members for taking a clone.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Note that the value byte[] part is still pointing to the currentBuffer and<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * represented by the valueOffset and valueLength<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  // We return this as a Cell to the upper layers of read flow and might try setting a new SeqId<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  // there. So this has to be an instance of SettableSequenceId.<a name="line.289"></a>
-<span class="sourceLineNo">290</span>  protected static class OnheapDecodedCell implements Cell, HeapSize, SettableSequenceId,<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      Streamable {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.293"></a>
-<span class="sourceLineNo">294</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.ARRAY));<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    private byte[] keyOnlyBuffer;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    private short rowLength;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    private int familyOffset;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    private byte familyLength;<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    private int qualifierOffset;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    private int qualifierLength;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    private long timestamp;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    private byte typeByte;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    private byte[] valueBuffer;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    private int valueOffset;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    private int valueLength;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    private byte[] tagsBuffer;<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    private int tagsOffset;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    private int tagsLength;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    private long seqId;<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>    protected OnheapDecodedCell(byte[] keyBuffer, short rowLength, int familyOffset,<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.312"></a>
-<span class="sourceLineNo">313</span>        byte[] valueBuffer, int valueOffset, int valueLen, long seqId, byte[] tagsBuffer,<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        int tagsOffset, int tagsLength) {<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      this.keyOnlyBuffer = keyBuffer;<a name="line.315"></a>
-<span class="sourceLineNo">316</span>      this.rowLength = rowLength;<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      this.familyOffset = familyOffset;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.familyLength = familyLength;<a name="line.318"></a>
-<span class="sourceLineNo">319</span>      this.qualifierOffset = qualOffset;<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.qualifierLength = qualLength;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>      this.timestamp = timeStamp;<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      this.typeByte = typeByte;<a name="line.322"></a>
-<span class="sourceLineNo">323</span>      this.valueBuffer = valueBuffer;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      this.valueOffset = valueOffset;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      this.valueLength = valueLen;<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      this.tagsBuffer = tagsBuffer;<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      this.tagsOffset = tagsOffset;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      this.tagsLength = tagsLength;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      setSequenceId(seqId);<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>    @Override<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    public byte[] getRowArray() {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>      return keyOnlyBuffer;<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    }<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>    @Override<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    public byte[] getFamilyArray() {<a name="line.338"></a>
-<span class="sourceLineNo">339</span>      return keyOnlyBuffer;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    }<a name="line.340"></a>
-<span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>    @Override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    public byte[] getQualifierArray() {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>      return keyOnlyBuffer;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
-<span class="sourceLineNo">346</span><a name="line.346"></a>
-<span class="sourceLineNo">347</span>    @Override<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    public int getRowOffset() {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      return Bytes.SIZEOF_SHORT;<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>    @Override<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    public short getRowLength() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      return rowLength;<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>    @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    public int getFamilyOffset() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      return familyOffset;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>    @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    public byte getFamilyLength() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      return familyLength;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>    @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    public int getQualifierOffset() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      return qualifierOffset;<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    }<a name="line.370"></a>
-<span class="sourceLineNo">371</span><a name="line.371"></a>
-<span class="sourceLineNo">372</span>    @Override<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    public int getQualifierLength() {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      return qualifierLength;<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>    @Override<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    public long getTimestamp() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      return timestamp;<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>    @Override<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    public byte getTypeByte() {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return typeByte;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>    @Override<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    public long getSequenceId() {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      return seqId;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>    @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    public byte[] getValueArray() {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      return this.valueBuffer;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>    @Override<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    public int getValueOffset() {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      return valueOffset;<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>    @Override<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    public int getValueLength() {<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      return valueLength;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    @Override<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    public byte[] getTagsArray() {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      return this.tagsBuffer;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    }<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    @Override<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    public int getTagsOffset() {<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      return this.tagsOffset;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    }<a name="line.415"></a>
-<span class="sourceLineNo">416</span><a name="line.416"></a>
-<span class="sourceLineNo">417</span>    @Override<a name="line.417"></a>
-<span class="sourceLineNo">418</span>    public int getTagsLength() {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      return tagsLength;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    @Override<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public String toString() {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          + getValueLength() + "/seqid=" + seqId;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    @Override<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    public void setSequenceId(long seqId) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      this.seqId = seqId;<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    @Override<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    public long heapSize() {<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>    @Override<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    public int write(OutputStream out) throws IOException {<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      return write(out, true);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    @Override<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.445"></a>
-<span class="sourceLineNo">446</span>          tagsLength, withTags);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      ByteBufferUtils.putInt(out, keyOnlyBuffer.length);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      // Write key<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      out.write(keyOnlyBuffer);<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      // Write value<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      out.write(this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      if (withTags) {<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        // 2 bytes tags length followed by tags bytes<a name="line.455"></a>
-<span class="sourceLineNo">456</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        out.write(this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>  }<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>  protected static class OffheapDecodedCell extends ByteBufferedCell implements HeapSize,<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      SettableSequenceId, Streamable {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.468"></a>
-<span class="sourceLineNo">469</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.BYTE_BUFFER));<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    private ByteBuffer keyBuffer;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    private short rowLength;<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    private int familyOffset;<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    private byte familyLength;<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    private int qualifierOffset;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    private int qualifierLength;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    private long timestamp;<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    private byte typeByte;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    private ByteBuffer valueBuffer;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    private int valueOffset;<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    private int valueLength;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private ByteBuffer tagsBuffer;<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    private int tagsOffset;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    private int tagsLength;<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    private long seqId;<a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>    protected OffheapDecodedCell(ByteBuffer keyBuffer, short rowLength, int familyOffset,<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.488"></a>
-<span class="sourceLineNo">489</span>        ByteBuffer valueBuffer, int valueOffset, int valueLen, long seqId, ByteBuffer tagsBuffer,<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        int tagsOffset, int tagsLength) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      // The keyBuffer is always onheap<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      assert keyBuffer.hasArray();<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      assert keyBuffer.arrayOffset() == 0;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      this.keyBuffer = keyBuffer;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      this.rowLength = rowLength;<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      this.familyOffset = familyOffset;<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      this.familyLength = familyLength;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      this.qualifierOffset = qualOffset;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      this.qualifierLength = qualLength;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      this.timestamp = timeStamp;<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      this.typeByte = typeByte;<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      this.valueBuffer = valueBuffer;<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      this.valueOffset = valueOffset;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      this.valueLength = valueLen;<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      this.tagsBuffer = tagsBuffer;<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      this.tagsOffset = tagsOffset;<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      this.tagsLength = tagsLength;<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      setSequenceId(seqId);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    @Override<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    public byte[] getRowArray() {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return this.keyBuffer.array();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    @Override<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    public int getRowOffset() {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      return getRowPosition();<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>    @Override<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    public short getRowLength() {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      return this.rowLength;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
-<span class="sourceLineNo">525</span><a name="line.525"></a>
-<span class="sourceLineNo">526</span>    @Override<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    public byte[] getFamilyArray() {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return this.keyBuffer.array();<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span><a name="line.530"></a>
-<span class="sourceLineNo">531</span>    @Override<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    public int getFamilyOffset() {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      return getFamilyPosition();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>    @Override<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    public byte getFamilyLength() {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      return this.familyLength;<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    }<a name="line.539"></a>
-<span class="sourceLineNo">540</span><a name="line.540"></a>
-<span class="sourceLineNo">541</span>    @Override<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    public byte[] getQualifierArray() {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      return this.keyBuffer.array();<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    }<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    @Override<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    public int getQualifierOffset() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      return getQualifierPosition();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    @Override<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    public int getQualifierLength() {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      return this.qualifierLength;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    }<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    @Override<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    public long getTimestamp() {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      return this.timestamp;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    @Override<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    public byte getTypeByte() {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      return this.typeByte;<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>    @Override<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    public long getSequenceId() {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      return this.seqId;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    @Override<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    public byte[] getValueArray() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      return CellUtil.cloneValue(this);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>    @Override<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    public int getValueOffset() {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      return 0;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
-<span class="sourceLineNo">580</span><a name="line.580"></a>
-<span class="sourceLineNo">581</span>    @Override<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    public int getValueLength() {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      return this.valueLength;<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>    @Override<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    public byte[] getTagsArray() {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      return CellUtil.cloneTags(this);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    }<a name="line.589"></a>
-<span class="sourceLineNo">590</span><a name="line.590"></a>
-<span class="sourceLineNo">591</span>    @Override<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    public int getTagsOffset() {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      return 0;<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    }<a name="line.594"></a>
-<span class="sourceLineNo">595</span><a name="line.595"></a>
-<span class="sourceLineNo">596</span>    @Override<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    public int getTagsLength() {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      return this.tagsLength;<a name="line.598"></a>
-<span class="sourceLineNo">599</span>    }<a name="line.599"></a>
-<span class="sourceLineNo">600</span><a name="line.600"></a>
-<span class="sourceLineNo">601</span>    @Override<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    public ByteBuffer getRowByteBuffer() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      return this.keyBuffer;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    }<a name="line.604"></a>
-<span class="sourceLineNo">605</span><a name="line.605"></a>
-<span class="sourceLineNo">606</span>    @Override<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    public int getRowPosition() {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      return Bytes.SIZEOF_SHORT;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    @Override<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    public ByteBuffer getFamilyByteBuffer() {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      return this.keyBuffer;<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>    @Override<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    public int getFamilyPosition() {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      return this.familyOffset;<a name="line.618"></a>
-<span class="sourceLineNo">619</span>    }<a name="line.619"></a>
-<span class="sourceLineNo">620</span><a name="line.620"></a>
-<span class="sourceLineNo">621</span>    @Override<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    public ByteBuffer getQualifierByteBuffer() {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      return this.keyBuffer;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    }<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>    @Override<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    public int getQualifierPosition() {<a name="line.627"></a>
-<span class="sourceLineNo">628</span>      return this.qualifierOffset;<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    @Override<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    public ByteBuffer getValueByteBuffer() {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      return this.valueBuffer;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
-<span class="sourceLineNo">635</span><a name="line.635"></a>
-<span class="sourceLineNo">636</span>    @Override<a name="line.636"></a>
-<span class="sourceLineNo">637</span>    public int getValuePosition() {<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this.valueOffset;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    @Override<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    public ByteBuffer getTagsByteBuffer() {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      return this.tagsBuffer;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>    }<a name="line.644"></a>
-<span class="sourceLineNo">645</span><a name="line.645"></a>
-<span class="sourceLineNo">646</span>    @Override<a name="line.646"></a>
-<span class="sourceLineNo">647</span>    public int getTagsPosition() {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      return this.tagsOffset;<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
-<span class="sourceLineNo">650</span><a name="line.650"></a>
-<span class="sourceLineNo">651</span>    @Override<a name="line.651"></a>
-<span class="sourceLineNo">652</span>    public long heapSize() {<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>    @Override<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    public void setSequenceId(long seqId) {<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      this.seqId = seqId;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span><a name="line.660"></a>
-<span class="sourceLineNo">661</span>    @Override<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    public int write(OutputStream out) throws IOException {<a name="line.662"></a>
-<span class="sourceLineNo">663</span>      return write(out, true);<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    }<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    @Override<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          tagsLength, withTags);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      ByteBufferUtils.putInt(out, keyBuffer.capacity());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      // Write key<a name="line.673"></a>
-<span class="sourceLineNo">674</span>      out.write(keyBuffer.array());<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      // Write value<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      ByteBufferUtils.copyBufferToStream(out, this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      if (withTags) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>        // 2 bytes tags length followed by tags bytes<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.681"></a>
-<span class="sourceLineNo">682</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.682"></a>
-<span class="sourceLineNo">683</span>        ByteBufferUtils.copyBufferToStream(out, this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  protected abstract static class<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      BufferedEncodedSeeker&lt;STATE extends SeekerState&gt;<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      implements EncodedSeeker {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    protected HFileBlockDecodingContext decodingCtx;<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    protected final CellComparator comparator;<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    protected ByteBuff currentBuffer;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    protected TagCompressionContext tagCompressionContext = null;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    protected  KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    // many object creations.<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    protected final ObjectIntPair&lt;ByteBuffer&gt; tmpPair = new ObjectIntPair&lt;ByteBuffer&gt;();<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    protected STATE current, previous;<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>    public BufferedEncodedSeeker(CellComparator comparator,<a name="line.702"></a>
-<span class="sourceLineNo">703</span>        HFileBlockDecodingContext decodingCtx) {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      this.comparator = comparator;<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.decodingCtx = decodingCtx;<a name="line.705"></a>
-<span class="sourceLineNo">706</span>      if (decodingCtx.getHFileContext().isCompressTags()) {<a name="line.706"></a>
-<span class="sourceLineNo">707</span>        try {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>          tagCompressionContext = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>        } catch (Exception e) {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>          throw new RuntimeException("Failed to initialize TagCompressionContext", e);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>        }<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      }<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      current = createSeekerState(); // always valid<a name="line.713"></a>
-<span class="sourceLineNo">714</span>      previous = createSeekerState(); // may not be valid<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    }<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>    protected boolean includesMvcc() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>      return this.decodingCtx.getHFileContext().isIncludesMvcc();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    }<a name="line.719"></a>
-<span class="sourceLineNo">720</span><a name="line.720"></a>
-<span class="sourceLineNo">721</span>    protected boolean includesTags() {<a name="line.721"></a>
-<span class="sourceLineNo">722</span>      return this.decodingCtx.getHFileContext().isIncludesTags();<a name="line.722"></a>
-<span class="sourceLineNo">723</span>    }<a name="line.723"></a>
-<span class="sourceLineNo">724</span><a name="line.724"></a>
-<span class="sourceLineNo">725</span>    @Override<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    public int compareKey(CellComparator comparator, Cell key) {<a name="line.726"></a>
-<span class="sourceLineNo">727</span>      keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return comparator.compareKeyIgnoresMvcc(key, keyOnlyKV);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    @Override<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    public void setCurrentBuffer(ByteBuff buffer) {<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      if (this.tagCompressionContext != null) {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        this.tagCompressionContext.clear();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      }<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      currentBuffer = buffer;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>      current.currentBuffer = currentBuffer;<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      if(tagCompressionContext != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>        current.tagCompressionContext = tagCompressionContext;<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      }<a name="line.740"></a>
-<span class="sourceLineNo">741</span>      decodeFirst();<a name="line.741"></a>
-<span class="sourceLineNo">742</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      previous.invalidate();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>    @Override<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    public Cell getKey() {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      byte[] key = new byte[current.keyLength];<a name="line.748"></a>
-<span class="sourceLineNo">749</span>      System.arraycopy(current.keyBuffer, 0, key, 0, current.keyLength);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return new KeyValue.KeyOnlyKeyValue(key);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public ByteBuffer getValueShallowCopy() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      currentBuffer.asSubByteBuffer(current.valueOffset, current.valueLength, tmpPair);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>      ByteBuffer dup = tmpPair.getFirst().duplicate();<a name="line.756"></a>
-<span class="sourceLineNo">757</span>      dup.position(tmpPair.getSecond());<a name="line.757"></a>
-<span class="sourceLineNo">758</span>      dup.limit(tmpPair.getSecond() + current.valueLength);<a name="line.758"></a>
-<span class="sourceLineNo">759</span>      return dup.slice();<a name="line.759"></a>
-<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
-<span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    @Override<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    public Cell getCell() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      return current.toCell();<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    }<a name="line.765"></a>
-<span class="sourceLineNo">766</span><a name="line.766"></a>
-<span class="sourceLineNo">767</span>    @Override<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    public void rewind() {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      currentBuffer.rewind();<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      if (tagCompressionContext != null) {<a name="line.770"></a>
-<span class="sourceLineNo">771</span>        tagCompressionContext.clear();<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      }<a name="line.772"></a>
-<span class="sourceLineNo">773</span>      decodeFirst();<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      previous.invalidate();<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public boolean next() {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      if (!currentBuffer.hasRemaining()) {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        return false;<a name="line.781"></a>
-<span class="sourceLineNo">782</span>      }<a name="line.782"></a>
-<span class="sourceLineNo">783</span>      decodeNext();<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      previous.invalidate();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>      return true;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span><a name="line.788"></a>
-<span class="sourceLineNo">789</span>    protected void decodeTags() {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      current.tagsLength = ByteBuff.readCompressedInt(currentBuffer);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      if (tagCompressionContext != null) {<a name="line.791"></a>
-<span class="sourceLineNo">792</span>        if (current.uncompressTags) {<a name="line.792"></a>
-<span class="sourceLineNo">793</span>          // Tag compression is been used. uncompress it into tagsBuffer<a name="line.793"></a>
-<span class="sourceLineNo">794</span>          current.ensureSpaceForTags();<a name="line.794"></a>
-<span class="sourceLineNo">795</span>          try {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>            current.tagsCompressedLength = tagCompressionContext.uncompressTags(currentBuffer,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>                current.tagsBuffer, 0, current.tagsLength);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>          } catch (IOException e) {<a name="line.798"></a>
-<span class="sourceLineNo">799</span>            throw new RuntimeException("Exception while uncompressing tags", e);<a name="line.799"></a>
-<span class="sourceLineNo">800</span>          }<a name="line.800"></a>
-<span class="sourceLineNo">801</span>        } else {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>          currentBuffer.skip(current.tagsCompressedLength);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>          current.uncompressTags = true;// Reset this.<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        }<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        current.tagsOffset = -1;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>      } else {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>        // When tag compress is not used, let us not do copying of tags bytes into tagsBuffer.<a name="line.807"></a>
-<span class="sourceLineNo">808</span>        // Just mark the tags Offset so as to create the KV buffer later in getKeyValueBuffer()<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        current.tagsOffset = currentBuffer.position();<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        currentBuffer.skip(current.tagsLength);<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      }<a name="line.811"></a>
-<span class="sourceLineNo">812</span>    }<a name="line.812"></a>
-<span class="sourceLineNo">813</span><a name="line.813"></a>
-<span class="sourceLineNo">814</span>    @Override<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    public int seekToKeyInBlock(Cell seekCell, boolean seekBefore) {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      int rowCommonPrefix = 0;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      int familyCommonPrefix = 0;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      int qualCommonPrefix = 0;<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      previous.invalidate();<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      do {<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        int comp;<a name="line.821"></a>
-<span class="sourceLineNo">822</span>        keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.822"></a>
-<span class="sourceLineNo">823</span>        if (current.lastCommonPrefix != 0) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>          // The KV format has row key length also in the byte array. The<a name="line.824"></a>
-<span class="sourceLineNo">825</span>          // common prefix<a name="line.825"></a>
-<span class="sourceLineNo">826</span>          // includes it. So we need to subtract to find out the common prefix<a name="line.826"></a>
-<span class="sourceLineNo">827</span>          // in the<a name="line.827"></a>
-<span class="sourceLineNo">828</span>          // row part alone<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          rowCommonPrefix = Math.min(rowCommonPrefix, current.lastCommonPrefix - 2);<a name="line.829"></a>
-<span class="sourceLineNo">830</span>        }<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        if (current.lastCommonPrefix &lt;= 2) {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>          rowCommonPrefix = 0;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>        }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>        rowCommonPrefix += findCommonPrefixInRowPart(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>        comp = compareCommonRowPrefix(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>        if (comp == 0) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          comp = compareTypeBytes(seekCell, keyOnlyKV);<a name="line.837"></a>
-<span class="sourceLineNo">838</span>          if (comp == 0) {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>            // Subtract the fixed row key length and the family key fixed length<a name="line.839"></a>
-<span class="sourceLineNo">840</span>            familyCommonPrefix = Math.max(<a name="line.840"></a>
-<span class="sourceLineNo">841</span>                0,<a name="line.841"></a>
-<span class="sourceLineNo">842</span>                Math.min(familyCommonPrefix,<a name="line.842"></a>
-<span class="sourceLineNo">843</span>                    current.lastCommonPrefix - (3 + keyOnlyKV.getRowLength())));<a name="line.843"></a>
-<span class="sourceLineNo">844</span>            familyCommonPrefix += findCommonPrefixInFamilyPart(seekCell, keyOnlyKV,<a name="line.844"></a>
-<span class="sourceLineNo">845</span>                familyCommonPrefix);<a name="line.845"></a>
-<span class="sourceLineNo">846</span>            comp = compareCommonFamilyPrefix(seekCell, keyOnlyKV, familyCommonPrefix);<a name="line.846"></a>
-<span class="sourceLineNo">847</span>            if (comp == 0) {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>              // subtract the rowkey fixed length and the family key fixed<a name="line.848"></a>
-<span class="sourceLineNo">849</span>              // length<a name="line.849"></a>
-<span class="sourceLineNo">850</span>              qualCommonPrefix = Math.max(<a name="line.850"></a>
-<span class="sourceLineNo">851</span>                  0,<a name="line.851"></a>
-<span class="sourceLineNo">852</span>                  Math.min(<a name="line.852"></a>
-<span class="sourceLineNo">853</span>                      qualCommonPrefix,<a name="line.853"></a>
-<span class="sourceLineNo">854</span>                      current.lastCommonPrefix<a name="line.854"></a>
-<span class="sourceLineNo">855</span>                          - (3 + keyOnlyKV.getRowLength() + keyOnlyKV.getFamilyLength())));<a name="line.855"></a>
-<span class="sourceLineNo">856</span>              qualCommonPrefix += findCommonPrefixInQualifierPart(seekCell, keyOnlyKV,<a name="line.856"></a>
-<span class="sourceLineNo">857</span>                  qualCommonPrefix);<a name="line.857"></a>
-<span class="sourceLineNo">858</span>              comp = compareCommonQualifierPrefix(seekCell, keyOnlyKV, qualCommonPrefix);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>              if (comp == 0) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>                comp = CellComparator.compareTimestamps(seekCell, keyOnlyKV);<a name="line.860"></a>
-<span class="sourceLineNo">861</span>                if (comp == 0) {<a name="line.861"></a>
-<span class="sourceLineNo">862</span>                  // Compare types. Let the delete types sort ahead of puts;<a name="line.862"></a>
-<span class="sourceLineNo">863</span>                  // i.e. types<a name="line.863"></a>
-<span class="sourceLineNo">864</span>                  // of higher numbers sort before those of lesser numbers.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>                  // Maximum<a name="line.865"></a>
-<span class="sourceLineNo">866</span>                  // (255)<a name="line.866"></a>
-<span class="sourceLineNo">867</span>                  // appears ahead of everything, and minimum (0) appears<a name="line.867"></a>
-<span class="sourceLineNo">868</span>                  // after<a name="line.868"></a>
-<span class="sourceLineNo">869</span>                  // everything.<a name="line.869"></a>
-<span class="sourceLineNo">870</span>                  comp = (0xff &amp; keyOnlyKV.getTypeByte()) - (0xff &amp; seekCell.getTypeByte());<a name="line.870"></a>
-<span class="sourceLineNo">871</span>                }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>              }<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            }<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          }<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>        if (comp == 0) { // exact match<a name="line.876"></a>
-<span class="sourceLineNo">877</span>          if (seekBefore) {<a name="line.877"></a>
-<span class="sourceLineNo">878</span>            if (!previous.isValid()) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>              // The caller (seekBefore) has to ensure that we are not at the<a name="line.879"></a>
-<span class="sourceLineNo">880</span>              // first key in the block.<a name="line.880"></a>
-<span class="sourceLineNo">881</span>              throw new IllegalStateException("Cannot seekBefore if "<a name="line.881"></a>
-<span class="sourceLineNo">882</span>                  + "positioned at the first key in the block: key="<a name="line.882"></a>
-<span class="sourceLineNo">883</span>                  + Bytes.toStringBinary(seekCell.getRowArray()));<a name="line.883"></a>
-<span class="sourceLineNo">884</span>            }<a name="line.884"></a>
-<span class="sourceLineNo">885</span>            moveToPrevious();<a name="line.885"></a>
-<span class="sourceLineNo">886</span>            return 1;<a name="line.886"></a>
-<span class="sourceLineNo">887</span>          }<a name="line.887"></a>
-<span class="sourceLineNo">888</span>          return 0;<a name="line.888"></a>
-<span class="sourceLineNo">889</span>        }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>        if (comp &lt; 0) { // already too large, check previous<a name="line.891"></a>
-<span class="sourceLineNo">892</span>          if (previous.isValid()) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>            moveToPrevious();<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          } else {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>            return HConstants.INDEX_KEY_MAGIC; // using optimized index key<a name="line.895"></a>
-<span class="sourceLineNo">896</span>          }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>          return 1;<a name="line.897"></a>
-<span class="sourceLineNo">898</span>        }<a name="line.898"></a>
-<span class="sourceLineNo">899</span><a name="line.899"></a>
-<span class="sourceLineNo">900</span>        // move to next, if more data is available<a name="line.900"></a>
-<span class="sourceLineNo">901</span>        if (currentBuffer.hasRemaining()) {<a name="line.901"></a>
-<span class="sourceLineNo">902</span>          previous.copyFromNext(current);<a name="line.902"></a>
-<span class="sourceLineNo">903</span>          decodeNext();<a name="line.903"></a>
-<span class="sourceLineNo">904</span>          current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.904"></a>
-<span class="sourceLineNo">905</span>        } else {<a name="line.905"></a>
-<span class="sourceLineNo">906</span>          break;<a name="line.906"></a>
-<span class="sourceLineNo">907</span>        }<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      } while (true);<a name="line.908"></a>
-<span class="sourceLineNo">909</span><a name="line.909"></a>
-<span class="sourceLineNo">910</span>      // we hit the end of the block, not an exact match<a name="line.910"></a>
-<span class="sourceLineNo">911</span>      return 1;<a name="line.911"></a>
-<span class="sourceLineNo">912</span>    }<a name="line.912"></a>
-<span class="sourceLineNo">913</span><a name="line.913"></a>
-<span class="sourceLineNo">914</span>    private int compareTypeBytes(Cell key, Cell right) {<a name="line.914"></a>
-<span class="sourceLineNo">915</span>      if (key.getFamilyLength() + key.getQualifierLength() == 0<a name="line.915"></a>
-<span class="sourceLineNo">916</span>          &amp;&amp; key.getTypeByte() == Type.Minimum.getCode()) {<a name="line.916"></a>
-<span class="sourceLineNo">917</span>        // left is "bigger", i.e. it appears later in the sorted order<a name="line.917"></a>
-<span class="sourceLineNo">918</span>        return 1;<a name="line.918"></a>
-<span class="sourceLineNo">919</span>      }<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      if (right.getFamilyLength() + right.getQualifierLength() == 0<a name="line.920"></a>
-<span class="sourceLineNo">921</span>          &amp;&amp; right.getTypeByte() == Type.Minimum.getCode()) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>        return -1;<a name="line.922"></a>
-<span class="sourceLineNo">923</span>      }<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      return 0;<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span><a name="line.926"></a>
-<span class="sourceLineNo">927</span>    private static int findCommonPrefixInRowPart(Cell left, Cell right, int rowCommonPrefix) {<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      return Bytes.findCommonPrefix(left.getRowArray(), right.getRowArray(), left.getRowLength()<a name="line.928"></a>
-<span class="sourceLineNo">929</span>          - rowCommonPrefix, right.getRowLength() - rowCommonPrefix, left.getRowOffset()<a name="line.929"></a>
-<span class="sourceLineNo">930</span>          + rowCommonPrefix, right.getRowOffset() + rowCommonPrefix);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    }<a name="line.931"></a>
-<span class="sourceLineNo">932</span><a name="line.932"></a>
-<span class="sourceLineNo">933</span>    private static int findCommonPrefixInFamilyPart(Cell left, Cell right, int familyCommonPrefix) {<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      return Bytes<a name="line.934"></a>
-<span class="sourceLineNo">935</span>          .findCommonPrefix(left.getFamilyArray(), right.getFamilyArray(), left.getFamilyLength()<a name="line.935"></a>
-<span class="sourceLineNo">936</span>              - familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix,<a name="line.936"></a>
-<span class="sourceLineNo">937</span>              left.getFamilyOffset() + familyCommonPrefix, right.getFamilyOffset()<a name="line.937"></a>
-<span class="sourceLineNo">938</span>                  + familyCommonPrefix);<a name="line.938"></a>
-<span class="sourceLineNo">939</span>    }<a name="line.939"></a>
-<span class="sourceLineNo">940</span><a name="line.940"></a>
-<span class="sourceLineNo">941</span>    private static int findCommonPrefixInQualifierPart(Cell left, Cell right,<a name="line.941"></a>
-<span class="sourceLineNo">942</span>        int qualifierCommonPrefix) {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      return Bytes.findCommonPrefix(left.getQualifierArray(), right.getQualifierArray(),<a name="line.943"></a>
-<span class="sourceLineNo">944</span>          left.getQualifierLength() - qualifierCommonPrefix, right.getQualifierLength()<a name="line.944"></a>
-<span class="sourceLineNo">945</span>              - qualifierCommonPrefix, left.getQualifierOffset() + qualifierCommonPrefix,<a name="line.945"></a>
-<span class="sourceLineNo">946</span>          right.getQualifierOffset() + qualifierCommonPrefix);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    private void moveToPrevious() {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!previous.isValid()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        throw new IllegalStateException(<a name="line.951"></a>
-<span class="sourceLineNo">952</span>            "Can move back only once and not in first key in the block.");<a name="line.952"></a>
-<span class="sourceLineNo">953</span>      }<a name="line.953"></a>
-<span class="sourceLineNo">954</span><a name="line.954"></a>
-<span class="sourceLineNo">955</span>      STATE tmp = previous;<a name="line.955"></a>
-<span class="sourceLineNo">956</span>      previous = current;<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      current = tmp;<a name="line.957"></a>
-<span class="sourceLineNo">958</span><a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // move after last key value<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      currentBuffer.position(current.nextKvOffset);<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      // Alrea

<TRUNCATED>
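
The removed seeker code above narrows each key comparison by skipping the bytes already known to match (the row, family, and qualifier common prefixes tracked while walking the encoded block) and hands only the remainder to Bytes.findCommonPrefix. A minimal sketch of that pattern on plain byte arrays; the class name and sample keys are illustrative and not taken from the HBase source:

  import org.apache.hadoop.hbase.util.Bytes;

  public class CommonPrefixSeekSketch {
    // Resume a row comparison after rowCommonPrefix bytes are already known to
    // match, mirroring findCommonPrefixInRowPart(...) in the seeker code above.
    static int findCommonPrefixInRowPart(byte[] left, int leftOff, int leftLen,
        byte[] right, int rightOff, int rightLen, int rowCommonPrefix) {
      return Bytes.findCommonPrefix(left, right,
          leftLen - rowCommonPrefix, rightLen - rowCommonPrefix,
          leftOff + rowCommonPrefix, rightOff + rowCommonPrefix);
    }

    public static void main(String[] args) {
      byte[] a = Bytes.toBytes("row-00042");
      byte[] b = Bytes.toBytes("row-00049");
      // Suppose 4 bytes ("row-") already matched against an earlier key in the block.
      int more = findCommonPrefixInRowPart(a, 0, a.length, b, 0, b.length, 4);
      System.out.println("additional common prefix bytes: " + more); // prints 4
    }
  }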

[28/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index 3205ae9..c43d262 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -1494,7 +1494,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>FIXED_OVERHEAD</h4>
-<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2256">FIXED_OVERHEAD</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2259">FIXED_OVERHEAD</a></pre>
 </li>
 </ul>
 <a name="DEEP_OVERHEAD">
@@ -1503,7 +1503,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockListLast">
 <li class="blockList">
 <h4>DEEP_OVERHEAD</h4>
-<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2260">DEEP_OVERHEAD</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2263">DEEP_OVERHEAD</a></pre>
 </li>
 </ul>
 </li>
@@ -2712,7 +2712,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastCompactSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1835">getLastCompactSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1838">getLastCompactSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getLastCompactSize()">getLastCompactSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2725,7 +2725,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1840">getSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1843">getSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getSize()">getSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2738,7 +2738,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>triggerMajorCompaction</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1845">triggerMajorCompaction</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1848">triggerMajorCompaction</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#triggerMajorCompaction()">triggerMajorCompaction</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2751,7 +2751,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanner</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1855">getScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1858">getScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
                          <a href="http://docs.oracle.com/javase/7/docs/api/java/util/NavigableSet.html?is-external=true" title="class or interface in java.util">NavigableSet</a>&lt;byte[]&gt;&nbsp;targetCols,
                          long&nbsp;readPt)
                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -2773,7 +2773,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createScanner</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1870">createScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1873">createScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
                             <a href="http://docs.oracle.com/javase/7/docs/api/java/util/NavigableSet.html?is-external=true" title="class or interface in java.util">NavigableSet</a>&lt;byte[]&gt;&nbsp;targetCols,
                             long&nbsp;readPt,
                             <a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;scanner)
@@ -2788,7 +2788,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1881">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1884">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -2801,7 +2801,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1886">getStorefilesCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1889">getStorefilesCount</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStorefilesCount()">getStorefilesCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2814,7 +2814,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMaxStoreFileAge</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1891">getMaxStoreFileAge</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1894">getMaxStoreFileAge</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMaxStoreFileAge()">getMaxStoreFileAge</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2827,7 +2827,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMinStoreFileAge</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1910">getMinStoreFileAge</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1913">getMinStoreFileAge</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMinStoreFileAge()">getMinStoreFileAge</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2840,7 +2840,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getAvgStoreFileAge</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1929">getAvgStoreFileAge</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1932">getAvgStoreFileAge</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getAvgStoreFileAge()">getAvgStoreFileAge</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2853,7 +2853,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumReferenceFiles</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1952">getNumReferenceFiles</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1955">getNumReferenceFiles</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getNumReferenceFiles()">getNumReferenceFiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2866,7 +2866,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumHFiles</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1963">getNumHFiles</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1966">getNumHFiles</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getNumHFiles()">getNumHFiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2879,7 +2879,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreSizeUncompressed</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1974">getStoreSizeUncompressed</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1977">getStoreSizeUncompressed</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStoreSizeUncompressed()">getStoreSizeUncompressed</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2892,7 +2892,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1979">getStorefilesSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1982">getStorefilesSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStorefilesSize()">getStorefilesSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2905,7 +2905,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesIndexSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1993">getStorefilesIndexSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1996">getStorefilesIndexSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStorefilesIndexSize()">getStorefilesIndexSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2918,7 +2918,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalStaticIndexSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2007">getTotalStaticIndexSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2010">getTotalStaticIndexSize</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getTotalStaticIndexSize()">Store</a></code></strong></div>
 <div class="block">Returns the total size of all index blocks in the data block indexes, including the root level,
  intermediate levels, and the leaf level for multi-level indexes, or just the root level for
@@ -2935,7 +2935,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalStaticBloomSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2020">getTotalStaticBloomSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2023">getTotalStaticBloomSize</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getTotalStaticBloomSize()">Store</a></code></strong></div>
 <div class="block">Returns the total byte size of all Bloom filter bit arrays. For compound Bloom filters even the
  Bloom blocks currently not loaded into the block cache are counted.</div>
@@ -2951,7 +2951,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMemStoreSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2033">getMemStoreSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2036">getMemStoreSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMemStoreSize()">getMemStoreSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2964,7 +2964,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactPriority</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2038">getCompactPriority</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2041">getCompactPriority</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactPriority()">getCompactPriority</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2977,7 +2977,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>throttleCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2047">throttleCompaction</a>(long&nbsp;compactionSize)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2050">throttleCompaction</a>(long&nbsp;compactionSize)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#throttleCompaction(long)">throttleCompaction</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2990,7 +2990,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getHRegion</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2051">getHRegion</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2054">getHRegion</a>()</pre>
 </li>
 </ul>
 <a name="getCoprocessorHost()">
@@ -2999,7 +2999,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCoprocessorHost</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2056">getCoprocessorHost</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2059">getCoprocessorHost</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCoprocessorHost()">getCoprocessorHost</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3012,7 +3012,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionInfo</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2061">getRegionInfo</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2064">getRegionInfo</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getRegionInfo()">getRegionInfo</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3025,7 +3025,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>areWritesEnabled</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2066">areWritesEnabled</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2069">areWritesEnabled</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#areWritesEnabled()">areWritesEnabled</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3038,7 +3038,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getSmallestReadPoint</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2071">getSmallestReadPoint</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2074">getSmallestReadPoint</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getSmallestReadPoint()">getSmallestReadPoint</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3053,7 +3053,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>updateColumnValue</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2087">updateColumnValue</a>(byte[]&nbsp;row,
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2090">updateColumnValue</a>(byte[]&nbsp;row,
                      byte[]&nbsp;f,
                      byte[]&nbsp;qualifier,
                      long&nbsp;newValue)
@@ -3073,7 +3073,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>upsert</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2107">upsert</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;cells,
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2110">upsert</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;cells,
           long&nbsp;readpoint)
             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#upsert(java.lang.Iterable,%20long)">Store</a></code></strong></div>
@@ -3099,7 +3099,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createFlushContext</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2117">createFlushContext</a>(long&nbsp;cacheFlushId)</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2120">createFlushContext</a>(long&nbsp;cacheFlushId)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#createFlushContext(long)">createFlushContext</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3112,7 +3112,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>needsCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2247">needsCompaction</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2250">needsCompaction</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#needsCompaction()">Store</a></code></strong></div>
 <div class="block">See if there's too much store files in this store</div>
 <dl>
@@ -3127,7 +3127,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2252">getCacheConfig</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2255">getCacheConfig</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCacheConfig()">Store</a></code></strong></div>
 <div class="block">Used for tests.</div>
 <dl>
@@ -3142,7 +3142,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2267">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2270">heapSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize()">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
@@ -3156,7 +3156,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getComparator</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2272">getComparator</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2275">getComparator</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getComparator()">getComparator</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3169,7 +3169,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanInfo</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2277">getScanInfo</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2280">getScanInfo</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getScanInfo()">getScanInfo</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3182,7 +3182,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>setScanInfo</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2285">setScanInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2288">setScanInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo)</pre>
 <div class="block">Set scan info, used by test</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>scanInfo</code> - new scan info to use for test</dd></dl>
 </li>
@@ -3193,7 +3193,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>hasTooManyStoreFiles</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2290">hasTooManyStoreFiles</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2293">hasTooManyStoreFiles</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#hasTooManyStoreFiles()">hasTooManyStoreFiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3206,7 +3206,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getFlushedCellsCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2295">getFlushedCellsCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2298">getFlushedCellsCount</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getFlushedCellsCount()">getFlushedCellsCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3219,7 +3219,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getFlushedCellsSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2300">getFlushedCellsSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2303">getFlushedCellsSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getFlushedCellsSize()">getFlushedCellsSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3232,7 +3232,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactedCellsCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2305">getCompactedCellsCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2308">getCompactedCellsCount</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactedCellsCount()">getCompactedCellsCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3245,7 +3245,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactedCellsSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2310">getCompactedCellsSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2313">getCompactedCellsSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactedCellsSize()">getCompactedCellsSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3258,7 +3258,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMajorCompactedCellsCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2315">getMajorCompactedCellsCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2318">getMajorCompactedCellsCount</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMajorCompactedCellsCount()">getMajorCompactedCellsCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3271,7 +3271,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMajorCompactedCellsSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2320">getMajorCompactedCellsSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2323">getMajorCompactedCellsSize</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMajorCompactedCellsSize()">getMajorCompactedCellsSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3284,7 +3284,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreEngine</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2329">getStoreEngine</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2332">getStoreEngine</a>()</pre>
 <div class="block">Returns the StoreEngine that is backing this concrete implementation of Store.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>Returns the <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver"><code>StoreEngine</code></a> object used internally inside this HStore object.</dd></dl>
 </li>
@@ -3295,7 +3295,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getOffPeakHours</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">OffPeakHours</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2333">getOffPeakHours</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">OffPeakHours</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2336">getOffPeakHours</a>()</pre>
 </li>
 </ul>
 <a name="onConfigurationChange(org.apache.hadoop.conf.Configuration)">
@@ -3304,7 +3304,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>onConfigurationChange</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2341">onConfigurationChange</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2344">onConfigurationChange</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">This method would be called by the <a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf"><code>ConfigurationManager</code></a>
  object when the <code>Configuration</code> object is reloaded from disk.</div>
 <dl>
@@ -3319,7 +3319,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>registerChildren</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2353">registerChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2356">registerChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
 <div class="block">Needs to be called to register the children to the manager.</div>
 <dl>
 <dt><strong>Specified by:</strong></dt>
@@ -3333,7 +3333,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>deregisterChildren</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2361">deregisterChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2364">deregisterChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
 <div class="block">Needs to be called to deregister the children from the manager.</div>
 <dl>
 <dt><strong>Specified by:</strong></dt>
@@ -3347,7 +3347,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactionPressure</h4>
-<pre>public&nbsp;double&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2366">getCompactionPressure</a>()</pre>
+<pre>public&nbsp;double&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2369">getCompactionPressure</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactionPressure()">Store</a></code></strong></div>
 <div class="block">This value can represent the degree of emergency of compaction for this store. It should be
  greater than or equal to 0.0, any value greater than 1.0 means we have too many store files.
@@ -3374,7 +3374,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>isPrimaryReplicaStore</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2371">isPrimaryReplicaStore</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2374">isPrimaryReplicaStore</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#isPrimaryReplicaStore()">isPrimaryReplicaStore</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3387,7 +3387,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>closeAndArchiveCompactedFiles</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2376">closeAndArchiveCompactedFiles</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2379">closeAndArchiveCompactedFiles</a>()
                                    throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#closeAndArchiveCompactedFiles()">Store</a></code></strong></div>
 <div class="block">Closes and archives the compacted files under this store</div>
@@ -3404,7 +3404,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>removeCompactedfiles</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2404">removeCompactedfiles</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFile</a>&gt;&nbsp;compactedfiles)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2407">removeCompactedfiles</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFile</a>&gt;&nbsp;compactedfiles)
                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Archives and removes the compacted files</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>compactedfiles</code> - The compacted files in this store that are not active in reads</dd>
@@ -3418,7 +3418,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>finalizeFlush</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2451">finalizeFlush</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2454">finalizeFlush</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#finalizeFlush()">Store</a></code></strong></div>
 <div class="block">This method is called when it is clear that the flush to disk is completed.
  The store may do any post-flush actions at this point.
@@ -3435,7 +3435,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockListLast">
 <li class="blockList">
 <h4>clearCompactedfiles</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2455">clearCompactedfiles</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFile</a>&gt;&nbsp;filesToRemove)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2458">clearCompactedfiles</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFile</a>&gt;&nbsp;filesToRemove)
                           throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
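
The hunks above mostly record a three-line shift in the HStore source anchors, but along the way they enumerate the metrics and lifecycle accessors HStore exposes through the Store interface: store file counts and sizes, memstore size, compaction pressure, and so on. A small sketch of how a caller might read those accessors at this snapshot; how the Store reference is obtained (here a plain method parameter) is left to the caller and is illustrative only:

  import org.apache.hadoop.hbase.regionserver.Store;

  public final class StoreMetricsSketch {
    private StoreMetricsSketch() {}

    // Summarize a few of the Store accessors whose javadoc anchors shifted above.
    public static String summarize(Store store) {
      return "storefiles=" + store.getStorefilesCount()
          + " storefilesSize=" + store.getStorefilesSize()
          + " memstoreSize=" + store.getMemStoreSize()
          + " compactionPressure=" + store.getCompactionPressure()
          + " needsCompaction=" + store.needsCompaction();
    }
  }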

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html
index 7adeb06..44ccb4f 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html
@@ -322,7 +322,7 @@ extends org.apache.hadoop.conf.Configured</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>create</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html" title="class in org.apache.hadoop.hbase.regionserver">RegionSplitPolicy</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html#line.100">create</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;region,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html" title="class in org.apache.hadoop.hbase.regionserver">RegionSplitPolicy</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html#line.101">create</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;region,
                        org.apache.hadoop.conf.Configuration&nbsp;conf)
                                 throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Create the RegionSplitPolicy configured for the given table.</div>
@@ -338,7 +338,7 @@ extends org.apache.hadoop.conf.Configured</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getSplitPolicyClass</h4>
-<pre>public static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html" title="class in org.apache.hadoop.hbase.regionserver">RegionSplitPolicy</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html#line.109">getSplitPolicyClass</a>(<a href="../../../../../org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a>&nbsp;htd,
+<pre>public static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html" title="class in org.apache.hadoop.hbase.regionserver">RegionSplitPolicy</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html#line.110">getSplitPolicyClass</a>(<a href="../../../../../org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a>&nbsp;htd,
                                                      org.apache.hadoop.conf.Configuration&nbsp;conf)
                                                               throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -351,7 +351,7 @@ extends org.apache.hadoop.conf.Configured</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>skipStoreFileRangeCheck</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html#line.139">skipStoreFileRangeCheck</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;familyName)</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.html#line.140">skipStoreFileRangeCheck</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;familyName)</pre>
 <div class="block">In <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#splitStoreFile(org.apache.hadoop.hbase.HRegionInfo,%20java.lang.String,%20org.apache.hadoop.hbase.regionserver.StoreFile,%20byte[],%20boolean,%20org.apache.hadoop.hbase.regionserver.RegionSplitPolicy)"><code>HRegionFileSystem.splitStoreFile(org.apache.hadoop.hbase.HRegionInfo, String,
  StoreFile, byte[], boolean, RegionSplitPolicy)</code></a> we are not creating the split reference
 if the split row does not lie in the StoreFile range. But in some use cases we may need to create

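The RegionSplitPolicy hunks above only shift source-line anchors for create(HRegion, Configuration), getSplitPolicyClass(HTableDescriptor, Configuration) and skipStoreFileRangeCheck(String). As an illustrative sketch only, not part of this commit, the snippet below shows how a table typically names the policy that getSplitPolicyClass later resolves; it assumes the stock HTableDescriptor.SPLIT_POLICY key and the bundled ConstantSizeRegionSplitPolicy class.

    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;

    public class SplitPolicyExample {
      public static void main(String[] args) {
        // Hypothetical table name, used only for illustration.
        HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("example_table"));
        // Name the split policy on the table descriptor. getSplitPolicyClass(htd, conf)
        // reads this value, and create(region, conf) then instantiates and configures
        // the resulting class for each region of the table.
        htd.setValue(HTableDescriptor.SPLIT_POLICY,
            ConstantSizeRegionSplitPolicy.class.getName());
        System.out.println("Configured split policy: "
            + htd.getValue(HTableDescriptor.SPLIT_POLICY));
      }
    }
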
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 915e53f..fcd0473 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -618,24 +618,24 @@
 <ul>
 <li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="strong">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">ScannerContext.LimitScope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">ScannerContext.NextState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreScanner.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">MemStoreScanner.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">ScanType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">BloomType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreScanner.StoreScannerCompactionRace.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">StoreScanner.StoreScannerCompactionRace</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">DefaultHeapMemoryTuner.StepDirection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitTransaction.SplitTransactionPhase.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">SplitTransaction.SplitTransactionPhase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">Region.FlushResult.Result</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionMergeTransaction.RegionMergeTransactionPhase.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">RegionMergeTransaction.RegionMergeTransactionPhase</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreScanner.StoreScannerCompactionRace.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">StoreScanner.StoreScannerCompactionRace</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">ScanType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">ScanQueryMatcher.MatchCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">DeleteTracker.DeleteResult</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionOpeningState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">RegionOpeningState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">Region.FlushResult.Result</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitTransaction.SplitTransactionPhase.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">SplitTransaction.SplitTransactionPhase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DeleteTracker.DeleteCompare.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">DeleteTracker.DeleteCompare</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">FlushType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreScanner.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">MemStoreScanner.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">DefaultHeapMemoryTuner.StepDirection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">Region.Operation</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">DeleteTracker.DeleteResult</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionOpeningState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">RegionOpeningState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="strong">BloomType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html b/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html
index 981248f..0ee9537 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html
@@ -251,7 +251,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>DEPRECATED_NAME_OF_NO_LIMIT_THROUGHPUT_CONTROLLER_CLASS</h4>
-<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html#line.46">DEPRECATED_NAME_OF_NO_LIMIT_THROUGHPUT_CONTROLLER_CLASS</a></pre>
+<pre>private static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html#line.45">DEPRECATED_NAME_OF_NO_LIMIT_THROUGHPUT_CONTROLLER_CLASS</a></pre>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory.DEPRECATED_NAME_OF_NO_LIMIT_THROUGHPUT_CONTROLLER_CLASS">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -286,7 +286,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>create</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/throttle/ThroughputController.html" title="interface in org.apache.hadoop.hbase.regionserver.throttle">ThroughputController</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html#line.50">create</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;server,
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/throttle/ThroughputController.html" title="interface in org.apache.hadoop.hbase.regionserver.throttle">ThroughputController</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html#line.48">create</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;server,
                           org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
@@ -296,7 +296,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getThroughputControllerClass</h4>
-<pre>public static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/throttle/ThroughputController.html" title="interface in org.apache.hadoop.hbase.regionserver.throttle">ThroughputController</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html#line.58">getThroughputControllerClass</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/throttle/ThroughputController.html" title="interface in org.apache.hadoop.hbase.regionserver.throttle">ThroughputController</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html#line.56">getThroughputControllerClass</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 <a name="resolveDeprecatedClassName(java.lang.String)">
@@ -305,7 +305,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>resolveDeprecatedClassName</h4>
-<pre>private static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html#line.79">resolveDeprecatedClassName</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;oldName)</pre>
+<pre>private static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.html#line.77">resolveDeprecatedClassName</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;oldName)</pre>
 <div class="block">Resolve deprecated class name to keep backward compatibiliy</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>oldName</code> - old name of the class</dd>
 <dt><span class="strong">Returns:</span></dt><dd>the new name if there is any</dd></dl>

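The CompactionThroughputControllerFactory changes above likewise only move line anchors for create(RegionServerServices, Configuration), getThroughputControllerClass(Configuration) and resolveDeprecatedClassName(String), the helper that maps the deprecated no-limit controller name onto its replacement. A minimal usage sketch follows; it is not part of this commit and simply asks the factory which controller class a default configuration would load.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
    import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;

    public class CompactionThrottleExample {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Resolve the controller class the region server would use for compactions.
        // If the configured name is the deprecated one, resolveDeprecatedClassName()
        // rewrites it to the current class before the class is loaded.
        Class<? extends ThroughputController> clazz =
            CompactionThroughputControllerFactory.getThroughputControllerClass(conf);
        System.out.println("Compaction throughput controller: " + clazz.getName());
      }
    }
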
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html b/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html
index 22b494a..3e33526 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html
@@ -103,7 +103,7 @@
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.357">SecureBulkLoadEndpoint.SecureBulkLoadListener</a>
+<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.367">SecureBulkLoadEndpoint.SecureBulkLoadListener</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Region.BulkLoadListener.html" title="interface in org.apache.hadoop.hbase.regionserver">Region.BulkLoadListener</a></pre>
 </li>
@@ -233,7 +233,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Regio
 <ul class="blockList">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.359">fs</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.369">fs</a></pre>
 </li>
 </ul>
 <a name="stagingDir">
@@ -242,7 +242,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Regio
 <ul class="blockList">
 <li class="blockList">
 <h4>stagingDir</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.360">stagingDir</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.370">stagingDir</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -251,7 +251,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Regio
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.361">conf</a></pre>
+<pre>private&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.371">conf</a></pre>
 </li>
 </ul>
 <a name="srcFs">
@@ -260,7 +260,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Regio
 <ul class="blockList">
 <li class="blockList">
 <h4>srcFs</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.363">srcFs</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.373">srcFs</a></pre>
 </li>
 </ul>
 <a name="origPermissions">
@@ -269,7 +269,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Regio
 <ul class="blockListLast">
 <li class="blockList">
 <h4>origPermissions</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.permission.FsPermission&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.364">origPermissions</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.permission.FsPermission&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.374">origPermissions</a></pre>
 </li>
 </ul>
 </li>
@@ -286,7 +286,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Regio
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SecureBulkLoadEndpoint.SecureBulkLoadListener</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.366">SecureBulkLoadEndpoint.SecureBulkLoadListener</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.376">SecureBulkLoadEndpoint.SecureBulkLoadListener</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                              <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;stagingDir,
                                              org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
@@ -305,7 +305,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Regio
 <ul class="blockList">
 <li class="blockList">
 <h4>prepareBulkLoad</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.374">prepareBulkLoad</a>(byte[]&nbsp;family,
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.384">prepareBulkLoad</a>(byte[]&nbsp;family,
                      <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;srcPath)
                        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/regionserver/Region.BulkLoadListener.html#prepareBulkLoad(byte[],%20java.lang.String)">Region.BulkLoadListener</a></code></strong></div>
@@ -325,7 +325,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Regio
 <ul class="blockList">
 <li class="blockList">
 <h4>doneBulkLoad</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.411">doneBulkLoad</a>(byte[]&nbsp;family,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.421">doneBulkLoad</a>(byte[]&nbsp;family,
                 <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;srcPath)
                   throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/regionserver/Region.BulkLoadListener.html#doneBulkLoad(byte[],%20java.lang.String)">Region.BulkLoadListener</a></code></strong></div>
@@ -344,7 +344,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Regio
 <ul class="blockList">
 <li class="blockList">
 <h4>failedBulkLoad</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.416">failedBulkLoad</a>(byte[]&nbsp;family,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.426">failedBulkLoad</a>(byte[]&nbsp;family,
                   <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;srcPath)
                     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/regionserver/Region.BulkLoadListener.html#failedBulkLoad(byte[],%20java.lang.String)">Region.BulkLoadListener</a></code></strong></div>
@@ -363,7 +363,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Regio
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isFile</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.451">isFile</a>(org.apache.hadoop.fs.Path&nbsp;p)
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.SecureBulkLoadListener.html#line.461">isFile</a>(org.apache.hadoop.fs.Path&nbsp;p)
                 throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Check if the path is referencing a file.
  This is mainly needed to avoid symlinks.</div>

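SecureBulkLoadEndpoint.SecureBulkLoadListener implements the Region.BulkLoadListener callbacks whose anchors move above: prepareBulkLoad, doneBulkLoad and failedBulkLoad. The listener below is an illustrative sketch only, not the class from this commit; it reuses the interface signatures shown in these hunks and merely logs where the secure listener stages and validates files.

    import java.io.IOException;

    import org.apache.hadoop.hbase.regionserver.Region;

    public class LoggingBulkLoadListener implements Region.BulkLoadListener {
      @Override
      public String prepareBulkLoad(byte[] family, String srcPath) throws IOException {
        // Return the path the region should actually load from; the secure listener
        // in this commit instead moves the source file into its staging directory here.
        return srcPath;
      }

      @Override
      public void doneBulkLoad(byte[] family, String srcPath) throws IOException {
        System.out.println("Bulk load finished for " + srcPath);
      }

      @Override
      public void failedBulkLoad(byte[] family, String srcPath) throws IOException {
        System.err.println("Bulk load failed for " + srcPath);
      }
    }
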
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html b/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html
index d7cbdc4..bdacb58 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html
@@ -576,7 +576,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/coprocessor/Coproc
 <ul class="blockList">
 <li class="blockList">
 <h4>getBulkLoadObservers</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.html" title="interface in org.apache.hadoop.hbase.coprocessor">BulkLoadObserver</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.312">getBulkLoadObservers</a>()</pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.html" title="interface in org.apache.hadoop.hbase.coprocessor">BulkLoadObserver</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.322">getBulkLoadObservers</a>()</pre>
 </li>
 </ul>
 <a name="createStagingDir(org.apache.hadoop.fs.Path, org.apache.hadoop.hbase.security.User, org.apache.hadoop.hbase.TableName)">
@@ -585,7 +585,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/coprocessor/Coproc
 <ul class="blockList">
 <li class="blockList">
 <h4>createStagingDir</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.319">createStagingDir</a>(org.apache.hadoop.fs.Path&nbsp;baseDir,
+<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.329">createStagingDir</a>(org.apache.hadoop.fs.Path&nbsp;baseDir,
                                          <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user,
                                          <a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
                                             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -599,7 +599,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/coprocessor/Coproc
 <ul class="blockList">
 <li class="blockList">
 <h4>createStagingDir</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.328">createStagingDir</a>(org.apache.hadoop.fs.Path&nbsp;baseDir,
+<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.338">createStagingDir</a>(org.apache.hadoop.fs.Path&nbsp;baseDir,
                                          <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user,
                                          <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;randomDir)
                                             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -613,7 +613,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/coprocessor/Coproc
 <ul class="blockList">
 <li class="blockList">
 <h4>getActiveUser</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.337">getActiveUser</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.347">getActiveUser</a>()</pre>
 </li>
 </ul>
 <a name="getService()">
@@ -622,7 +622,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/coprocessor/Coproc
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getService</h4>
-<pre>public&nbsp;com.google.protobuf.Service&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.353">getService</a>()</pre>
+<pre>public&nbsp;com.google.protobuf.Service&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.html#line.363">getService</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/coprocessor/CoprocessorService.html#getService()">getService</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/coprocessor/CoprocessorService.html" title="interface in org.apache.hadoop.hbase.coprocessor">CoprocessorService</a></code></dd>


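SecureBulkLoadEndpoint itself is a region coprocessor, exposed over RPC through the getService() method whose anchor moves above. As a hedged sketch, not part of this commit, it is typically enabled by listing it under the standard region coprocessor configuration key; the class name comes from the paths in these hunks, the rest is assumption.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;

    public class EnableSecureBulkLoad {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Region servers load the coprocessors listed under this key; once loaded,
        // SecureBulkLoadEndpoint.getService() serves the secure bulk load RPCs.
        conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
            "org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint");
        System.out.println(conf.get(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY));
      }
    }
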
[04/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
index 9a60dce..fcaf416 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
@@ -34,1938 +34,1994 @@
 <span class="sourceLineNo">026</span>import java.util.concurrent.locks.Lock;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.27"></a>
 <span class="sourceLineNo">028</span><a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.Path;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.Cell;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.HConstants;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.io.ByteArrayOutputStream;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.io.ByteBuffInputStream;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.io.ByteBufferSupportDataOutputStream;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.nio.MultiByteBuff;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.nio.SingleByteBuff;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.ChecksumType;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.io.IOUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>import com.google.common.annotations.VisibleForTesting;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import com.google.common.base.Preconditions;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>/**<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * Reading {@link HFile} version 1 and 2 blocks, and writing version 2 blocks.<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * &lt;ul&gt;<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * &lt;li&gt;In version 1 all blocks are always compressed or uncompressed, as<a name="line.59"></a>
-<span class="sourceLineNo">060</span> * specified by the {@link HFile}'s compression algorithm, with a type-specific<a name="line.60"></a>
-<span class="sourceLineNo">061</span> * magic record stored in the beginning of the compressed data (i.e. one needs<a name="line.61"></a>
-<span class="sourceLineNo">062</span> * to uncompress the compressed block to determine the block type). There is<a name="line.62"></a>
-<span class="sourceLineNo">063</span> * only a single compression algorithm setting for all blocks. Offset and size<a name="line.63"></a>
-<span class="sourceLineNo">064</span> * information from the block index are required to read a block.<a name="line.64"></a>
-<span class="sourceLineNo">065</span> * &lt;li&gt;In version 2 a block is structured as follows:<a name="line.65"></a>
-<span class="sourceLineNo">066</span> * &lt;ul&gt;<a name="line.66"></a>
-<span class="sourceLineNo">067</span> * &lt;li&gt;header (see Writer#finishBlock())<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * &lt;ul&gt;<a name="line.68"></a>
-<span class="sourceLineNo">069</span> * &lt;li&gt;Magic record identifying the block type (8 bytes)<a name="line.69"></a>
-<span class="sourceLineNo">070</span> * &lt;li&gt;Compressed block size, excluding header, including checksum (4 bytes)<a name="line.70"></a>
-<span class="sourceLineNo">071</span> * &lt;li&gt;Uncompressed block size, excluding header, excluding checksum (4 bytes)<a name="line.71"></a>
-<span class="sourceLineNo">072</span> * &lt;li&gt;The offset of the previous block of the same type (8 bytes). This is<a name="line.72"></a>
-<span class="sourceLineNo">073</span> * used to be able to navigate to the previous block without going to the block<a name="line.73"></a>
-<span class="sourceLineNo">074</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the ordinal describing checksum type (1 byte)<a name="line.74"></a>
-<span class="sourceLineNo">075</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the number of data bytes/checksum chunk (4 bytes)<a name="line.75"></a>
-<span class="sourceLineNo">076</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the size of data on disk, including header,<a name="line.76"></a>
-<span class="sourceLineNo">077</span> * excluding checksums (4 bytes)<a name="line.77"></a>
-<span class="sourceLineNo">078</span> * &lt;/ul&gt;<a name="line.78"></a>
-<span class="sourceLineNo">079</span> * &lt;/li&gt;<a name="line.79"></a>
-<span class="sourceLineNo">080</span> * &lt;li&gt;Raw/Compressed/Encrypted/Encoded data. The compression algorithm is the<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * same for all the blocks in the {@link HFile}, similarly to what was done in<a name="line.81"></a>
-<span class="sourceLineNo">082</span> * version 1.<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * &lt;li&gt;For minorVersions &amp;gt;=1, a series of 4 byte checksums, one each for<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * the number of bytes specified by bytesPerChecksum.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * &lt;/ul&gt;<a name="line.85"></a>
-<span class="sourceLineNo">086</span> * &lt;/ul&gt;<a name="line.86"></a>
-<span class="sourceLineNo">087</span> */<a name="line.87"></a>
-<span class="sourceLineNo">088</span>@InterfaceAudience.Private<a name="line.88"></a>
-<span class="sourceLineNo">089</span>public class HFileBlock implements Cacheable {<a name="line.89"></a>
-<span class="sourceLineNo">090</span><a name="line.90"></a>
-<span class="sourceLineNo">091</span>  /**<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * On a checksum failure on a Reader, these many suceeding read<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * requests switch back to using hdfs checksums before auto-reenabling<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   * hbase checksum verification.<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static final boolean FILL_HEADER = true;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final boolean DONT_FILL_HEADER = false;<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  /**<a name="line.101"></a>
-<span class="sourceLineNo">102</span>   * The size of block header when blockType is {@link BlockType#ENCODED_DATA}.<a name="line.102"></a>
-<span class="sourceLineNo">103</span>   * This extends normal header by adding the id of encoder.<a name="line.103"></a>
-<span class="sourceLineNo">104</span>   */<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  public static final int ENCODED_HEADER_SIZE = HConstants.HFILEBLOCK_HEADER_SIZE<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      + DataBlockEncoding.ID_SIZE;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  static final byte[] DUMMY_HEADER_NO_CHECKSUM =<a name="line.108"></a>
-<span class="sourceLineNo">109</span>     new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  // How to get the estimate correctly? if it is a singleBB?<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  public static final int MULTI_BYTE_BUFFER_HEAP_SIZE =<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      (int)ClassSize.estimateBase(MultiByteBuff.class, false);<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>  // meta.usesHBaseChecksum+offset+nextBlockOnDiskSizeWithHeader<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public static final int EXTRA_SERIALIZATION_SPACE = Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      + Bytes.SIZEOF_LONG;<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  /**<a name="line.119"></a>
-<span class="sourceLineNo">120</span>   * Each checksum value is an integer that can be stored in 4 bytes.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  static final CacheableDeserializer&lt;Cacheable&gt; blockDeserializer =<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.126"></a>
-<span class="sourceLineNo">127</span>            throws IOException {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();<a name="line.128"></a>
-<span class="sourceLineNo">129</span>          ByteBuff newByteBuffer;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>          if (reuse) {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>            newByteBuffer = buf.slice();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>          } else {<a name="line.132"></a>
-<span class="sourceLineNo">133</span>            // Used only in tests<a name="line.133"></a>
-<span class="sourceLineNo">134</span>            int len = buf.limit();<a name="line.134"></a>
-<span class="sourceLineNo">135</span>            newByteBuffer = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.135"></a>
-<span class="sourceLineNo">136</span>            newByteBuffer.put(0, buf, buf.position(), len);<a name="line.136"></a>
-<span class="sourceLineNo">137</span>          }<a name="line.137"></a>
-<span class="sourceLineNo">138</span>          buf.position(buf.limit());<a name="line.138"></a>
-<span class="sourceLineNo">139</span>          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>          boolean usesChecksum = buf.get() == (byte)1;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>          HFileBlock hFileBlock = new HFileBlock(newByteBuffer, usesChecksum, memType);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>          hFileBlock.offset = buf.getLong();<a name="line.142"></a>
-<span class="sourceLineNo">143</span>          hFileBlock.nextBlockOnDiskSizeWithHeader = buf.getInt();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>          if (hFileBlock.hasNextBlockHeader()) {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>            hFileBlock.buf.limit(hFileBlock.buf.limit() - hFileBlock.headerSize());<a name="line.145"></a>
-<span class="sourceLineNo">146</span>          }<a name="line.146"></a>
-<span class="sourceLineNo">147</span>          return hFileBlock;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>        @Override<a name="line.150"></a>
-<span class="sourceLineNo">151</span>        public int getDeserialiserIdentifier() {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>          return deserializerIdentifier;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>        @Override<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>          // Used only in tests<a name="line.157"></a>
-<span class="sourceLineNo">158</span>          return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        }<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      };<a name="line.160"></a>
-<span class="sourceLineNo">161</span>  private static final int deserializerIdentifier;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  static {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    deserializerIdentifier = CacheableDeserializerIdManager<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        .registerDeserializer(blockDeserializer);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>  }<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  /** Type of block. Header field 0. */<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private BlockType blockType;<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>  /** Size on disk excluding header, including checksum. Header field 1. */<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  private int onDiskSizeWithoutHeader;<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /** Size of pure data. Does not include header or checksums. Header field 2. */<a name="line.173"></a>
-<span class="sourceLineNo">174</span>  private final int uncompressedSizeWithoutHeader;<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  /** The offset of the previous block on disk. Header field 3. */<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private final long prevBlockOffset;<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  /**<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   * Size on disk of header + data. Excludes checksum. Header field 6,<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * OR calculated from {@link #onDiskSizeWithoutHeader} when using HDFS checksum.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   */<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final int onDiskDataSizeWithHeader;<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /** The in-memory representation of the hfile block */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private ByteBuff buf;<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span>  /** Meta data that holds meta information on the hfileblock */<a name="line.188"></a>
-<span class="sourceLineNo">189</span>  private HFileContext fileContext;<a name="line.189"></a>
+<span class="sourceLineNo">029</span>import org.apache.commons.logging.Log;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.commons.logging.LogFactory;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.fs.Path;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.Cell;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.HConstants;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.io.ByteArrayOutputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.io.ByteBuffInputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.ByteBufferSupportDataOutputStream;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.nio.MultiByteBuff;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.nio.SingleByteBuff;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.ChecksumType;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.io.IOUtils;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>import com.google.common.annotations.VisibleForTesting;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import com.google.common.base.Preconditions;<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>/**<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * Reads {@link HFile} version 1 and version 2 blocks but writes version 2 blocks only.<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * Version 2 was introduced in hbase-0.92.0. Does read and write out to the filesystem but also<a name="line.60"></a>
+<span class="sourceLineNo">061</span> * the read and write to Cache.<a name="line.61"></a>
+<span class="sourceLineNo">062</span> *<a name="line.62"></a>
+<span class="sourceLineNo">063</span> * &lt;h3&gt;HFileBlock: Version 1&lt;/h3&gt;<a name="line.63"></a>
+<span class="sourceLineNo">064</span> * As of this writing, there should be no more version 1 blocks found out in the wild. Version 2<a name="line.64"></a>
+<span class="sourceLineNo">065</span> * as introduced in hbase-0.92.0.<a name="line.65"></a>
+<span class="sourceLineNo">066</span> * In version 1 all blocks are always compressed or uncompressed, as<a name="line.66"></a>
+<span class="sourceLineNo">067</span> * specified by the {@link HFile}'s compression algorithm, with a type-specific<a name="line.67"></a>
+<span class="sourceLineNo">068</span> * magic record stored in the beginning of the compressed data (i.e. one needs<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * to uncompress the compressed block to determine the block type). There is<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * only a single compression algorithm setting for all blocks. Offset and size<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * information from the block index are required to read a block.<a name="line.71"></a>
+<span class="sourceLineNo">072</span> * &lt;h3&gt;HFileBlock: Version 2&lt;/h3&gt;<a name="line.72"></a>
+<span class="sourceLineNo">073</span> * In version 2, a block is structured as follows:<a name="line.73"></a>
+<span class="sourceLineNo">074</span> * &lt;ul&gt;<a name="line.74"></a>
+<span class="sourceLineNo">075</span> * &lt;li&gt;&lt;b&gt;Header:&lt;/b&gt; See Writer#putHeader(); header total size is HFILEBLOCK_HEADER_SIZE)<a name="line.75"></a>
+<span class="sourceLineNo">076</span> * &lt;ul&gt;<a name="line.76"></a>
+<span class="sourceLineNo">077</span> * &lt;li&gt;Magic record identifying the {@link BlockType} (8 bytes): e.g. &lt;code&gt;DATABLK*&lt;/code&gt;<a name="line.77"></a>
+<span class="sourceLineNo">078</span> * &lt;li&gt;Compressed -- a.k.a 'on disk' -- block size, excluding header, but including<a name="line.78"></a>
+<span class="sourceLineNo">079</span> *     tailing checksum bytes (4 bytes)<a name="line.79"></a>
+<span class="sourceLineNo">080</span> * &lt;li&gt;Uncompressed block size, excluding header, and excluding checksum bytes (4 bytes)<a name="line.80"></a>
+<span class="sourceLineNo">081</span> * &lt;li&gt;The offset of the previous block of the same type (8 bytes). This is<a name="line.81"></a>
+<span class="sourceLineNo">082</span> * used to navigate to the previous block without having to go to the block index<a name="line.82"></a>
+<span class="sourceLineNo">083</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the ordinal describing checksum type (1 byte)<a name="line.83"></a>
+<span class="sourceLineNo">084</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the number of data bytes/checksum chunk (4 bytes)<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the size of data 'on disk', including header,<a name="line.85"></a>
+<span class="sourceLineNo">086</span> * excluding checksums (4 bytes)<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * &lt;/ul&gt;<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * &lt;/li&gt;<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * &lt;li&gt;&lt;b&gt;Raw/Compressed/Encrypted/Encoded data:&lt;/b&gt; The compression algorithm is the<a name="line.89"></a>
+<span class="sourceLineNo">090</span> * same for all the blocks in the {@link HFile}, similarly to what was done in<a name="line.90"></a>
+<span class="sourceLineNo">091</span> * version 1. If compression is NONE, this is just raw, serialized Cells.<a name="line.91"></a>
+<span class="sourceLineNo">092</span> * &lt;li&gt;&lt;b&gt;Tail:&lt;/b&gt; For minorVersions &amp;gt;=1, a series of 4 byte checksums, one each for<a name="line.92"></a>
+<span class="sourceLineNo">093</span> * the number of bytes specified by bytesPerChecksum.<a name="line.93"></a>
+<span class="sourceLineNo">094</span> * &lt;/ul&gt;<a name="line.94"></a>
+<span class="sourceLineNo">095</span> * &lt;p&gt;Be aware that when we read from HDFS, we overread pulling in the next blocks' header too.<a name="line.95"></a>
+<span class="sourceLineNo">096</span> * We do this to save having to do two seeks to read an HFileBlock; a seek to read the header<a name="line.96"></a>
+<span class="sourceLineNo">097</span> * to figure lengths, etc., and then another seek to pull in the data.<a name="line.97"></a>
+<span class="sourceLineNo">098</span> */<a name="line.98"></a>
+<span class="sourceLineNo">099</span>@InterfaceAudience.Private<a name="line.99"></a>
+<span class="sourceLineNo">100</span>public class HFileBlock implements Cacheable {<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final Log LOG = LogFactory.getLog(HFileBlock.class);<a name="line.101"></a>
+<span class="sourceLineNo">102</span><a name="line.102"></a>
+<span class="sourceLineNo">103</span>  /**<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   * On a checksum failure, do these many succeeding read requests using hdfs checksums before<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * auto-reenabling hbase checksum verification.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   */<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;<a name="line.107"></a>
+<span class="sourceLineNo">108</span><a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static int UNSET = -1;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  public static final boolean FILL_HEADER = true;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  public static final boolean DONT_FILL_HEADER = false;<a name="line.111"></a>
+<span class="sourceLineNo">112</span><a name="line.112"></a>
+<span class="sourceLineNo">113</span>  // How to get the estimate correctly? if it is a singleBB?<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  public static final int MULTI_BYTE_BUFFER_HEAP_SIZE =<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      (int)ClassSize.estimateBase(MultiByteBuff.class, false);<a name="line.115"></a>
+<span class="sourceLineNo">116</span><a name="line.116"></a>
+<span class="sourceLineNo">117</span>  /**<a name="line.117"></a>
+<span class="sourceLineNo">118</span>   * See #blockDeserializer method for more info.<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * 13 bytes of extra stuff stuck on the end of the HFileBlock that we pull in from HDFS (note,<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * when we read from HDFS, we pull in an HFileBlock AND the header of the next block if one).<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   * The 13 bytes are: usesHBaseChecksum (1 byte) + offset of this block (long) +<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * nextBlockOnDiskSizeWithHeader (int).<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   */<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  public static final int EXTRA_SERIALIZATION_SPACE =<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG;<a name="line.125"></a>
+<span class="sourceLineNo">126</span><a name="line.126"></a>
+<span class="sourceLineNo">127</span>  /**<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   * Each checksum value is an integer that can be stored in 4 bytes.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   */<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  static final byte[] DUMMY_HEADER_NO_CHECKSUM =<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  /**<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * Used deserializing blocks from Cache.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   *<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * Serializing to cache is a little hard to follow. See Writer#finishBlock for where it is done.<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   * When we start to append to a new HFileBlock,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>   * we skip over where the header should go before we start adding Cells. When the block is<a name="line.140"></a>
+<span class="sourceLineNo">141</span>   * done, we'll then go back and fill in the header and the checksum tail. Be aware that what<a name="line.141"></a>
+<span class="sourceLineNo">142</span>   * gets serialized into the blockcache is a byte array that contains an HFileBlock followed by<a name="line.142"></a>
+<span class="sourceLineNo">143</span>   * its checksums and then the header of the next HFileBlock (needed to help navigate), followed<a name="line.143"></a>
+<span class="sourceLineNo">144</span>   * again by an extra 13 bytes of meta info needed when time to recreate the HFileBlock from cache.<a name="line.144"></a>
+<span class="sourceLineNo">145</span>   *<a name="line.145"></a>
+<span class="sourceLineNo">146</span>   * ++++++++++++++<a name="line.146"></a>
+<span class="sourceLineNo">147</span>   * + HFileBlock +<a name="line.147"></a>
+<span class="sourceLineNo">148</span>   * ++++++++++++++<a name="line.148"></a>
+<span class="sourceLineNo">149</span>   * + Checksums  +<a name="line.149"></a>
+<span class="sourceLineNo">150</span>   * ++++++++++++++<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   * + NextHeader +<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   * ++++++++++++++<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * + ExtraMeta! +<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   * ++++++++++++++<a name="line.154"></a>
+<span class="sourceLineNo">155</span>   *<a name="line.155"></a>
+<span class="sourceLineNo">156</span>   * TODO: Fix it so we do NOT put the NextHeader into blockcache. It is not necessary.<a name="line.156"></a>
+<span class="sourceLineNo">157</span>   */<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  static final CacheableDeserializer&lt;Cacheable&gt; blockDeserializer =<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.159"></a>
+<span class="sourceLineNo">160</span>        public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.160"></a>
+<span class="sourceLineNo">161</span>        throws IOException {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>          // Rewind to just before the EXTRA_SERIALIZATION_SPACE.<a name="line.162"></a>
+<span class="sourceLineNo">163</span>          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>          // Get a new buffer to pass the deserialized HFileBlock for it to 'own'.<a name="line.164"></a>
+<span class="sourceLineNo">165</span>          ByteBuff newByteBuffer;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          if (reuse) {<a name="line.166"></a>
+<span class="sourceLineNo">167</span>            newByteBuffer = buf.slice();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          } else {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>            int len = buf.limit();<a name="line.169"></a>
+<span class="sourceLineNo">170</span>            newByteBuffer = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.170"></a>
+<span class="sourceLineNo">171</span>            newByteBuffer.put(0, buf, buf.position(), len);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>          }<a name="line.172"></a>
+<span class="sourceLineNo">173</span>          // Read out the EXTRA_SERIALIZATION_SPACE content and shove into our HFileBlock.<a name="line.173"></a>
+<span class="sourceLineNo">174</span>          buf.position(buf.limit());<a name="line.174"></a>
+<span class="sourceLineNo">175</span>          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);<a name="line.175"></a>
+<span class="sourceLineNo">176</span>          boolean usesChecksum = buf.get() == (byte)1;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>          HFileBlock hFileBlock = new HFileBlock(newByteBuffer, usesChecksum, memType);<a name="line.177"></a>
+<span class="sourceLineNo">178</span>          hFileBlock.offset = buf.getLong();<a name="line.178"></a>
+<span class="sourceLineNo">179</span>          hFileBlock.nextBlockOnDiskSizeWithHeader = buf.getInt();<a name="line.179"></a>
+<span class="sourceLineNo">180</span>          if (hFileBlock.hasNextBlockHeader()) {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>            hFileBlock.buf.limit(hFileBlock.buf.limit() - hFileBlock.headerSize());<a name="line.181"></a>
+<span class="sourceLineNo">182</span>          }<a name="line.182"></a>
+<span class="sourceLineNo">183</span>          return hFileBlock;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>        @Override<a name="line.186"></a>
+<span class="sourceLineNo">187</span>        public int getDeserialiserIdentifier() {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>          return deserializerIdentifier;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>        }<a name="line.189"></a>
 <span class="sourceLineNo">190</span><a name="line.190"></a>
-<span class="sourceLineNo">191</span>  /**<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * The offset of this block in the file. Populated by the reader for<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * convenience of access. This offset is not part of the block header.<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  private long offset = -1;<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * The on-disk size of the next block, including the header, obtained by<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * peeking into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the next block's<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * header, or -1 if unknown.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  private int nextBlockOnDiskSizeWithHeader = -1;<a name="line.202"></a>
+<span class="sourceLineNo">191</span>        @Override<a name="line.191"></a>
+<span class="sourceLineNo">192</span>        public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>          // Used only in tests<a name="line.193"></a>
+<span class="sourceLineNo">194</span>          return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>        }<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      };<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>  private static final int deserializerIdentifier;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  static {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    deserializerIdentifier = CacheableDeserializerIdManager<a name="line.200"></a>
+<span class="sourceLineNo">201</span>        .registerDeserializer(blockDeserializer);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
 <span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private MemoryType memType = MemoryType.EXCLUSIVE;<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * is mostly used when the block data has already been read and uncompressed,<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   * and is sitting in a byte buffer.<a name="line.209"></a>
-<span class="sourceLineNo">210</span>   *<a name="line.210"></a>
-<span class="sourceLineNo">211</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.211"></a>
-<span class="sourceLineNo">212</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.212"></a>
-<span class="sourceLineNo">213</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.213"></a>
-<span class="sourceLineNo">214</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.214"></a>
-<span class="sourceLineNo">215</span>   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes) followed by<a name="line.215"></a>
-<span class="sourceLineNo">216</span>   *          uncompressed data. This<a name="line.216"></a>
-<span class="sourceLineNo">217</span>   * @param fillHeader when true, parse {@code buf} and override the first 4 header fields.<a name="line.217"></a>
-<span class="sourceLineNo">218</span>   * @param offset the file offset the block was read from<a name="line.218"></a>
-<span class="sourceLineNo">219</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.219"></a>
-<span class="sourceLineNo">220</span>   * @param fileContext HFile meta data<a name="line.220"></a>
-<span class="sourceLineNo">221</span>   */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      long prevBlockOffset, ByteBuff buf, boolean fillHeader, long offset,<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    this.blockType = blockType;<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    this.buf = buf;<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    this.offset = offset;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    this.fileContext = fileContext;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    if (fillHeader)<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      overwriteHeader();<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    this.buf.rewind();<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  }<a name="line.236"></a>
+<span class="sourceLineNo">204</span>  /** Type of block. Header field 0. */<a name="line.204"></a>
+<span class="sourceLineNo">205</span>  private BlockType blockType;<a name="line.205"></a>
+<span class="sourceLineNo">206</span><a name="line.206"></a>
+<span class="sourceLineNo">207</span>  /**<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   * Size on disk excluding header, including checksum. Header field 1.<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   */<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  private int onDiskSizeWithoutHeader;<a name="line.211"></a>
+<span class="sourceLineNo">212</span><a name="line.212"></a>
+<span class="sourceLineNo">213</span>  /**<a name="line.213"></a>
+<span class="sourceLineNo">214</span>   * Size of pure data. Does not include header or checksums. Header field 2.<a name="line.214"></a>
+<span class="sourceLineNo">215</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.215"></a>
+<span class="sourceLineNo">216</span>   */<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  private final int uncompressedSizeWithoutHeader;<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>  /**<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   * The offset of the previous block on disk. Header field 3.<a name="line.220"></a>
+<span class="sourceLineNo">221</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   */<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  private final long prevBlockOffset;<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>  /**<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   * Size on disk of header + data. Excludes checksum. Header field 6,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>   * OR calculated from {@link #onDiskSizeWithoutHeader} when using HDFS checksum.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private final int onDiskDataSizeWithHeader;<a name="line.230"></a>
+<span class="sourceLineNo">231</span><a name="line.231"></a>
+<span class="sourceLineNo">232</span>  /** The in-memory representation of the hfile block */<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  private ByteBuff buf;<a name="line.233"></a>
+<span class="sourceLineNo">234</span><a name="line.234"></a>
+<span class="sourceLineNo">235</span>  /** Meta data that holds meta information on the hfileblock */<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private HFileContext fileContext;<a name="line.236"></a>
 <span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    this(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, prevBlockOffset,<a name="line.241"></a>
-<span class="sourceLineNo">242</span>        new SingleByteBuff(buf), fillHeader, offset, onDiskDataSizeWithHeader, fileContext);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  }<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /**<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.246"></a>
-<span class="sourceLineNo">247</span>   */<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  HFileBlock(HFileBlock that) {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    this.blockType = that.blockType;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    this.prevBlockOffset = that.prevBlockOffset;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    this.buf = that.buf.duplicate();<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this.offset = that.offset;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    this.fileContext = that.fileContext;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
-<span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    this(new SingleByteBuff(b), usesHBaseChecksum);<a name="line.261"></a>
-<span class="sourceLineNo">262</span>  }<a name="line.262"></a>
-<span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>  /**<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * to that point.<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   */<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum) throws IOException {<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this(b, usesHBaseChecksum, MemoryType.EXCLUSIVE);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  }<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>  /**<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * to that point.<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum, MemoryType memType) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    b.rewind();<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    blockType = BlockType.read(b);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    onDiskSizeWithoutHeader = b.getInt();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    uncompressedSizeWithoutHeader = b.getInt();<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    prevBlockOffset = b.getLong();<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    HFileContextBuilder contextBuilder = new HFileContextBuilder();<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    if (usesHBaseChecksum) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      contextBuilder.withBytesPerCheckSum(b.getInt());<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      this.onDiskDataSizeWithHeader = b.getInt();<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    } else {<a name="line.292"></a>
-<span class="sourceLineNo">293</span>      contextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      contextBuilder.withBytesPerCheckSum(0);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      this.onDiskDataSizeWithHeader = onDiskSizeWithoutHeader +<a name="line.295"></a>
-<span class="sourceLineNo">296</span>                                       HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    this.fileContext = contextBuilder.build();<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    this.memType = memType;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    buf = b;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    buf.rewind();<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
-<span class="sourceLineNo">303</span><a name="line.303"></a>
-<span class="sourceLineNo">304</span>  public BlockType getBlockType() {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    return blockType;<a name="line.305"></a>
+<span class="sourceLineNo">238</span>  /**<a name="line.238"></a>
+<span class="sourceLineNo">239</span>   * The offset of this block in the file. Populated by the reader for<a name="line.239"></a>
+<span class="sourceLineNo">240</span>   * convenience of access. This offset is not part of the block header.<a name="line.240"></a>
+<span class="sourceLineNo">241</span>   */<a name="line.241"></a>
+<span class="sourceLineNo">242</span>  private long offset = UNSET;<a name="line.242"></a>
+<span class="sourceLineNo">243</span><a name="line.243"></a>
+<span class="sourceLineNo">244</span>  /**<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * The on-disk size of the next block, including the header, obtained by<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   * peeking into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the next block's<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   * header, or -1 if unknown.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>   */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  private int nextBlockOnDiskSizeWithHeader = UNSET;<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private MemoryType memType = MemoryType.EXCLUSIVE;<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>  /**<a name="line.253"></a>
+<span class="sourceLineNo">254</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.254"></a>
+<span class="sourceLineNo">255</span>   * is used when the block data has already been read and uncompressed,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>   * and is sitting in a byte buffer.<a name="line.256"></a>
+<span class="sourceLineNo">257</span>   *<a name="line.257"></a>
+<span class="sourceLineNo">258</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.258"></a>
+<span class="sourceLineNo">259</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.259"></a>
+<span class="sourceLineNo">260</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.260"></a>
+<span class="sourceLineNo">261</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.261"></a>
+<span class="sourceLineNo">262</span>   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes) followed by<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   *          uncompressed data.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * @param offset the file offset the block was read from<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.266"></a>
+<span class="sourceLineNo">267</span>   * @param fileContext HFile meta data<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   */<a name="line.268"></a>
+<span class="sourceLineNo">269</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      long prevBlockOffset, ByteBuff buf, boolean fillHeader, long offset,<a name="line.270"></a>
+<span class="sourceLineNo">271</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.blockType = blockType;<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    this.buf = buf;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    this.offset = offset;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    this.fileContext = fileContext;<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    if (fillHeader) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      overwriteHeader();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.buf.rewind();<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    this(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, prevBlockOffset,<a name="line.289"></a>
+<span class="sourceLineNo">290</span>        new SingleByteBuff(buf), fillHeader, offset, onDiskDataSizeWithHeader, fileContext);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>  /**<a name="line.293"></a>
+<span class="sourceLineNo">294</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.294"></a>
+<span class="sourceLineNo">295</span>   */<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  HFileBlock(HFileBlock that) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    this.blockType = that.blockType;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;<a name="line.298"></a>
+<span class="sourceLineNo">299</span>    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    this.prevBlockOffset = that.prevBlockOffset;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    this.buf = that.buf.duplicate();<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    this.offset = that.offset;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.fileContext = that.fileContext;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;<a name="line.305"></a>
 <span class="sourceLineNo">306</span>  }<a name="line.306"></a>
 <span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  public short getDataBlockEncodingId() {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.311"></a>
-<span class="sourceLineNo">312</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    return buf.getShort(headerSize());<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  }<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>  /**<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @return the on-disk size of header + data part + checksum.<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   */<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  public int getOnDiskSizeWithHeader() {<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   */<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  public int getOnDiskSizeWithoutHeader() {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    return onDiskSizeWithoutHeader;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  }<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  /**<a name="line.331"></a>
-<span class="sourceLineNo">332</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   */<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   public int getUncompressedSizeWithoutHeader() {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    return uncompressedSizeWithoutHeader;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>  }<a name="line.336"></a>
-<span class="sourceLineNo">337</span><a name="line.337"></a>
-<span class="sourceLineNo">338</span>  /**<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   *         -1 if unknown<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public long getPrevBlockOffset() {<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    return prevBlockOffset;<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   * is modified as side-effect.<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   */<a name="line.349"></a>
-<span class="sourceLineNo">350</span>  private void overwriteHeader() {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    buf.rewind();<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    blockType.write(buf);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    buf.putLong(prevBlockOffset);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span>  }<a name="line.361"></a>
-<span class="sourceLineNo">362</span><a name="line.362"></a>
-<span class="sourceLineNo">363</span>  /**<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * Returns a buffer that does not include the header or checksum.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   *<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.366"></a>
+<span class="sourceLineNo">308</span>  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    this(new SingleByteBuff(b), usesHBaseChecksum);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   * to that point.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum) throws IOException {<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    this(b, usesHBaseChecksum, MemoryType.EXCLUSIVE);<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  }<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * to that point.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   */<a name="line.327"></a>
+<span class="sourceLineNo">328</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum, MemoryType memType) throws IOException {<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    b.rewind();<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    blockType = BlockType.read(b);<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    onDiskSizeWithoutHeader = b.getInt();<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    uncompressedSizeWithoutHeader = b.getInt();<a name="line.332"></a>
+<span class="sourceLineNo">333</span>    prevBlockOffset = b.getLong();<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    HFileContextBuilder contextBuilder = new HFileContextBuilder();<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    if (usesHBaseChecksum) {<a name="line.336"></a>
+<span class="sourceLineNo">337</span>      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));<a name="line.337"></a>
+<span class="sourceLineNo">338</span>      contextBuilder.withBytesPerCheckSum(b.getInt());<a name="line.338"></a>
+<span class="sourceLineNo">339</span>      this.onDiskDataSizeWithHeader = b.getInt();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    } else {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      contextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.341"></a>
+<span class="sourceLineNo">342</span>      contextBuilder.withBytesPerCheckSum(0);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      this.onDiskDataSizeWithHeader =<a name="line.343"></a>
+<span class="sourceLineNo">344</span>          onDiskSizeWithoutHeader + HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    this.fileContext = contextBuilder.build();<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    this.memType = memType;<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    buf = b;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    buf.rewind();<a name="line.349"></a>
+<span class="sourceLineNo">350</span>  }<a name="line.350"></a>
+<span class="sourceLineNo">351</span><a name="line.351"></a>
+<span class="sourceLineNo">352</span>  public BlockType getBlockType() {<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    return blockType;<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  }<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  public short getDataBlockEncodingId() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.358"></a>
+<span class="sourceLineNo">359</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.359"></a>
+<span class="sourceLineNo">360</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    }<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return buf.getShort(headerSize());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the on-disk size of header + data part + checksum.<a name="line.366"></a>
 <span class="sourceLineNo">367</span>   */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ByteBuff dup = this.buf.duplicate();<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    dup.position(headerSize());<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    dup.limit(buf.limit() - totalChecksumBytes());<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    return dup.slice();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>  }<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>  /**<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   * Returns the buffer this block stores internally. The clients must not<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * modify the buffer object. This method has to be public because it is used<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   * filter lookup, but has to be used with caution. Checksum data is not<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * included in the returned buffer but header data is.<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   *<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * @return the buffer of this block for read-only operations<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   */<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  public ByteBuff getBufferReadOnly() {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    ByteBuff dup = this.buf.duplicate();<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    dup.limit(buf.limit() - totalChecksumBytes());<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    return dup.slice();<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>  /**<a name="line.390"></a>
-<span class="sourceLineNo">391</span>   * Returns the buffer of this block, including header data. The clients must<a name="line.391"></a>
-<span class="sourceLineNo">392</span>   * not modify the buffer object. This method has to be public because it is<a name="line.392"></a>
-<span class="sourceLineNo">393</span>   * used in {@link org.apache.hadoop.hbase.io.hfile.bucket.BucketCache} to avoid buffer copy.<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   *<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @return the buffer with header and checksum included for read-only operations<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   */<a name="line.396"></a>
-<span class="sourceLineNo">397</span>  public ByteBuff getBufferReadOnlyWithHeader() {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    ByteBuff dup = this.buf.duplicate();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    return dup.slice();<a name="line.399"></a>
-<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>  /**<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   * Returns a byte buffer of this block, including header data and checksum, positioned at<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * the beginning of header. The underlying data array is not copied.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   *<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @return the byte buffer with header and checksum included<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  ByteBuff getBufferWithHeader() {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    ByteBuff dupBuf = buf.duplicate();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    dupBuf.rewind();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    return dupBuf;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      String fieldName) throws IOException {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    if (valueFromBuf != valueFromField) {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.417"></a>
-<span class="sourceLineNo">418</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    }<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.422"></a>
-<span class="sourceLineNo">423</span>      throws IOException {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (valueFromBuf != valueFromField) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   * This function is primary for testing and debugging, and is not<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   */<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  void sanityCheck() throws IOException {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    buf.rewind();<a name="line.438"></a>
-<span class="sourceLineNo">439</span><a name="line.439"></a>
-<span class="sourceLineNo">440</span>    sanityCheckAssertion(BlockType.read(buf), blockType);<a name="line.440"></a>
-<span class="sourceLineNo">441</span><a name="line.441"></a>
-<span class="sourceLineNo">442</span>    sanityCheckAssertion(buf.getInt(), onDiskSizeWithoutHeader,<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        "onDiskSizeWithoutHeader");<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>    sanityCheckAssertion(buf.getInt(), uncompressedSizeWithoutHeader,<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        "uncompressedSizeWithoutHeader");<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>    sanityCheckAssertion(buf.getLong(), prevBlockOffset, "prevBlocKOffset");<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      sanityCheckAssertion(buf.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      sanityCheckAssertion(buf.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.451"></a>
-<span class="sourceLineNo">452</span>          "bytesPerChecksum");<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      sanityCheckAssertion(buf.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    }<a name="line.454"></a>
-<span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>    int cksumBytes = totalChecksumBytes();<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    if (buf.limit() != expectedBufLimit) {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      throw new AssertionError("Expected buffer limit " + expectedBufLimit<a name="line.459"></a>
-<span class="sourceLineNo">460</span>          + ", got " + buf.limit());<a name="line.460"></a>
-<span class="sourceLineNo">461</span>    }<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    int hdrSize = headerSize();<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    if (buf.capacity() != expectedBufLimit &amp;&amp;<a name="line.466"></a>
-<span class="sourceLineNo">467</span>        buf.capacity() != expectedBufLimit + hdrSize) {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>      throw new AssertionError("Invalid buffer capacity: " + buf.capacity() +<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    }<a name="line.470"></a>
-<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
-<span class="sourceLineNo">472</span><a name="line.472"></a>
-<span class="sourceLineNo">473</span>  @Override<a name="line.473"></a>
-<span class="sourceLineNo">474</span>  public String toString() {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    StringBuilder sb = new StringBuilder()<a name="line.475"></a>
-<span class="sourceLineNo">476</span>      .append("HFileBlock [")<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      .append(" fileOffset=").append(offset)<a name="line.477"></a>
-<span class="sourceLineNo">478</span>      .append(" headerSize()=").append(headerSize())<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      .append(" blockType=").append(blockType)<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      .append(" onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.480"></a>
-<span class="sourceLineNo">481</span>      .append(" uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      .append(" prevBlockOffset=").append(prevBlockOffset)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      .append(" isUseHBaseChecksum()=").append(fileContext.isUseHBaseChecksum());<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      sb.append(" checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        .append(" bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        .append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    } else {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      sb.append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.490"></a>
-<span class="sourceLineNo">491</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    }<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    String dataBegin = null;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    if (buf.hasArray()) {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    } else {<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    }<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    sb.append(" getOnDiskSizeWithHeader()=").append(getOnDiskSizeWithHeader())<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      .append(" totalChecksumBytes()=").append(totalChecksumBytes())<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      .append(" isUnpacked()=").append(isUnpacked())<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      .append(" buf=[ ").append(buf).append(" ]")<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      .append(" dataBeginsWith=").append(dataBegin)<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      .append(" fileContext=").append(fileContext)<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      .append(" ]");<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    return sb.toString();<a name="line.511"></a>
-<span class="sourceLineNo">512</span>  }<a name="line.512"></a>
-<span class="sourceLineNo">513</span><a name="line.513"></a>
-<span class="sourceLineNo">514</span>  /**<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * Called after reading a block with provided onDiskSizeWithHeader.<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   */<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  private void validateOnDiskSizeWithoutHeader(int expectedOnDiskSizeWithoutHeader)<a name="line.517"></a>
-<span class="sourceLineNo">518</span>  throws IOException {<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    if (onDiskSizeWithoutHeader != expectedOnDiskSizeWithoutHeader) {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      String dataBegin = null;<a name="line.520"></a>
-<span class="sourceLineNo">521</span>      if (buf.hasArray()) {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>        dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset(), Math.min(32, buf.limit()));<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      } else {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        ByteBuff bufDup = getBufferReadOnly();<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        byte[] dataBeginBytes = new byte[Math.min(32, bufDup.limit() - bufDup.position())];<a name="line.525"></a>
-<span class="sourceLineNo">526</span>        bufDup.get(dataBeginBytes);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String blockInfoMsg =<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        "Block offset: " + offset + ", data starts with: " + dataBegin;<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new IOException("On-disk size without header provided is "<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + expectedOnDiskSizeWithoutHeader + ", but block "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>          + "header contains " + onDiskSizeWithoutHeader + ". " +<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          blockInfoMsg);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  }<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>  /**<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      // encryption details.<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      return this;<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span><a name="line.549"></a>
-<span class="sourceLineNo">550</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.551"></a>
-<span class="sourceLineNo">552</span><a name="line.552"></a>
-<span class="sourceLineNo">553</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.553"></a>
-<span clas

<TRUNCATED>

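The hunk above ends, truncated, inside HFileBlock.unpack(); for orientation, a minimal standalone sketch of the header consistency-check pattern used by HFileBlock.sanityCheck() follows. The field layout, class and method names below are simplified illustrations (block type, checksum metadata, and buffer limit/capacity checks are omitted), not the actual HFile on-disk header format.

// Standalone sketch (simplified, illustrative field layout -- not the real HFile header):
// re-parse the serialized header and verify it agrees with the in-memory fields,
// mirroring the consistency-check idea in HFileBlock.sanityCheck().
import java.io.IOException;
import java.nio.ByteBuffer;

public class BlockHeaderCheckSketch {

  // Throws if a value re-read from the buffer differs from the expected field value.
  static void checkField(long fromBuffer, long expected, String name) throws IOException {
    if (fromBuffer != expected) {
      throw new IOException(name + " in buffer (" + fromBuffer
          + ") does not match expected value (" + expected + ")");
    }
  }

  // Re-reads the first 16 header bytes of buf and compares them to the given fields.
  // Uses a duplicate so the caller's position and limit are left untouched.
  static void sanityCheck(ByteBuffer buf, int onDiskSizeWithoutHeader,
      int uncompressedSizeWithoutHeader, long prevBlockOffset) throws IOException {
    ByteBuffer dup = buf.duplicate();
    dup.rewind();
    checkField(dup.getInt(), onDiskSizeWithoutHeader, "onDiskSizeWithoutHeader");
    checkField(dup.getInt(), uncompressedSizeWithoutHeader, "uncompressedSizeWithoutHeader");
    checkField(dup.getLong(), prevBlockOffset, "prevBlockOffset");
  }

  public static void main(String[] args) throws IOException {
    ByteBuffer buf = ByteBuffer.allocate(16);
    buf.putInt(128).putInt(256).putLong(-1L);   // pretend header: two sizes plus prev offset
    sanityCheck(buf, 128, 256, -1L);            // passes: buffer agrees with the fields
  }
}

The real method, as shown in the diff above, additionally validates the block type, the checksum type and bytes-per-checksum when HBase checksums are enabled, and the buffer's limit and capacity against the expected on-disk size.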
[20/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html b/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html
index 8e94eb6..6e7cf22 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.html
@@ -33,566 +33,565 @@
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.KeyValue.Type;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.ByteBufferUtils;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.39"></a>
-<span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>/**<a name="line.41"></a>
-<span class="sourceLineNo">042</span> * These methods have the same definition as any implementation of the EncodedSeeker.<a name="line.42"></a>
-<span class="sourceLineNo">043</span> *<a name="line.43"></a>
-<span class="sourceLineNo">044</span> * In the future, the EncodedSeeker could be modified to work with the Cell interface directly.  It<a name="line.44"></a>
-<span class="sourceLineNo">045</span> * currently returns a new KeyValue object each time getKeyValue is called.  This is not horrible,<a name="line.45"></a>
-<span class="sourceLineNo">046</span> * but in order to create a new KeyValue object, we must first allocate a new byte[] and copy in<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * the data from the PrefixTreeCell.  It is somewhat heavyweight right now.<a name="line.47"></a>
-<span class="sourceLineNo">048</span> */<a name="line.48"></a>
-<span class="sourceLineNo">049</span>@InterfaceAudience.Private<a name="line.49"></a>
-<span class="sourceLineNo">050</span>public class PrefixTreeSeeker implements EncodedSeeker {<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>  protected ByteBuffer block;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>  protected boolean includeMvccVersion;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  protected PrefixTreeArraySearcher ptSearcher;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  public PrefixTreeSeeker(boolean includeMvccVersion) {<a name="line.56"></a>
-<span class="sourceLineNo">057</span>    this.includeMvccVersion = includeMvccVersion;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  }<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  @Override<a name="line.60"></a>
-<span class="sourceLineNo">061</span>  public void setCurrentBuffer(ByteBuff fullBlockBuffer) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>    ptSearcher = DecoderFactory.checkOut(fullBlockBuffer, includeMvccVersion);<a name="line.62"></a>
-<span class="sourceLineNo">063</span>    rewind();<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  }<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>  /**<a name="line.66"></a>
-<span class="sourceLineNo">067</span>   * &lt;p&gt;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>   * Currently unused.<a name="line.68"></a>
-<span class="sourceLineNo">069</span>   * &lt;/p&gt;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>   * TODO performance leak. should reuse the searchers. hbase does not currently have a hook where<a name="line.70"></a>
-<span class="sourceLineNo">071</span>   * this can be called<a name="line.71"></a>
-<span class="sourceLineNo">072</span>   */<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  public void releaseCurrentSearcher(){<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    DecoderFactory.checkIn(ptSearcher);<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  }<a name="line.75"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.util.ByteBufferUtils;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.38"></a>
+<span class="sourceLineNo">039</span><a name="line.39"></a>
+<span class="sourceLineNo">040</span>/**<a name="line.40"></a>
+<span class="sourceLineNo">041</span> * These methods have the same definition as any implementation of the EncodedSeeker.<a name="line.41"></a>
+<span class="sourceLineNo">042</span> *<a name="line.42"></a>
+<span class="sourceLineNo">043</span> * In the future, the EncodedSeeker could be modified to work with the Cell interface directly.  It<a name="line.43"></a>
+<span class="sourceLineNo">044</span> * currently returns a new KeyValue object each time getKeyValue is called.  This is not horrible,<a name="line.44"></a>
+<span class="sourceLineNo">045</span> * but in order to create a new KeyValue object, we must first allocate a new byte[] and copy in<a name="line.45"></a>
+<span class="sourceLineNo">046</span> * the data from the PrefixTreeCell.  It is somewhat heavyweight right now.<a name="line.46"></a>
+<span class="sourceLineNo">047</span> */<a name="line.47"></a>
+<span class="sourceLineNo">048</span>@InterfaceAudience.Private<a name="line.48"></a>
+<span class="sourceLineNo">049</span>public class PrefixTreeSeeker implements EncodedSeeker {<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>  protected ByteBuffer block;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  protected boolean includeMvccVersion;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>  protected PrefixTreeArraySearcher ptSearcher;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>  public PrefixTreeSeeker(boolean includeMvccVersion) {<a name="line.55"></a>
+<span class="sourceLineNo">056</span>    this.includeMvccVersion = includeMvccVersion;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>  }<a name="line.57"></a>
+<span class="sourceLineNo">058</span><a name="line.58"></a>
+<span class="sourceLineNo">059</span>  @Override<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  public void setCurrentBuffer(ByteBuff fullBlockBuffer) {<a name="line.60"></a>
+<span class="sourceLineNo">061</span>    ptSearcher = DecoderFactory.checkOut(fullBlockBuffer, includeMvccVersion);<a name="line.61"></a>
+<span class="sourceLineNo">062</span>    rewind();<a name="line.62"></a>
+<span class="sourceLineNo">063</span>  }<a name="line.63"></a>
+<span class="sourceLineNo">064</span><a name="line.64"></a>
+<span class="sourceLineNo">065</span>  /**<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   * &lt;p&gt;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   * Currently unused.<a name="line.67"></a>
+<span class="sourceLineNo">068</span>   * &lt;/p&gt;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>   * TODO performance leak. should reuse the searchers. hbase does not currently have a hook where<a name="line.69"></a>
+<span class="sourceLineNo">070</span>   * this can be called<a name="line.70"></a>
+<span class="sourceLineNo">071</span>   */<a name="line.71"></a>
+<span class="sourceLineNo">072</span>  public void releaseCurrentSearcher(){<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    DecoderFactory.checkIn(ptSearcher);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  }<a name="line.74"></a>
+<span class="sourceLineNo">075</span><a name="line.75"></a>
 <span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  @Override<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  public Cell getKey() {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>    return ptSearcher.current();<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  }<a name="line.81"></a>
+<span class="sourceLineNo">077</span>  @Override<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  public Cell getKey() {<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    return ptSearcher.current();<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
+<span class="sourceLineNo">081</span><a name="line.81"></a>
 <span class="sourceLineNo">082</span><a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  @Override<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public ByteBuffer getValueShallowCopy() {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    return CellUtil.getValueBufferShallowCopy(ptSearcher.current());<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  }<a name="line.87"></a>
-<span class="sourceLineNo">088</span><a name="line.88"></a>
-<span class="sourceLineNo">089</span>  /**<a name="line.89"></a>
-<span class="sourceLineNo">090</span>   * currently must do deep copy into new array<a name="line.90"></a>
-<span class="sourceLineNo">091</span>   */<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  @Override<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public Cell getCell() {<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    // The PrefixTreecell is of type BytebufferedCell and the value part of the cell<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    // determines whether we are offheap cell or onheap cell.  All other parts of the cell-<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    // row, fam and col are all represented as onheap byte[]<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    ByteBufferedCell cell = (ByteBufferedCell)ptSearcher.current();<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    if (cell == null) {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      return null;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    }<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    // Use the ByteBuffered cell to see if the Cell is onheap or offheap<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    if (cell.getValueByteBuffer().hasArray()) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>      return new OnheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.103"></a>
-<span class="sourceLineNo">104</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.104"></a>
-<span class="sourceLineNo">105</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.105"></a>
-<span class="sourceLineNo">106</span>          cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),<a name="line.106"></a>
-<span class="sourceLineNo">107</span>          cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),<a name="line.107"></a>
-<span class="sourceLineNo">108</span>          cell.getSequenceId());<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    } else {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      return new OffheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.110"></a>
-<span class="sourceLineNo">111</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.111"></a>
-<span class="sourceLineNo">112</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.112"></a>
-<span class="sourceLineNo">113</span>          cell.getValueByteBuffer(), cell.getValuePosition(), cell.getValueLength(),<a name="line.113"></a>
-<span class="sourceLineNo">114</span>          cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(),<a name="line.114"></a>
-<span class="sourceLineNo">115</span>          cell.getTypeByte(), cell.getSequenceId());<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    }<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  }<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  /**<a name="line.119"></a>
-<span class="sourceLineNo">120</span>   * &lt;p&gt;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>   * Currently unused.<a name="line.121"></a>
-<span class="sourceLineNo">122</span>   * &lt;/p&gt;&lt;p&gt;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>   * A nice, lightweight reference, though the underlying cell is transient. This method may return<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   * the same reference to the backing PrefixTreeCell repeatedly, while other implementations may<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   * return a different reference for each Cell.<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * &lt;/p&gt;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   * The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to<a name="line.127"></a>
-<span class="sourceLineNo">128</span>   * use this method instead of the getKeyValue() methods above.<a name="line.128"></a>
-<span class="sourceLineNo">129</span>   */<a name="line.129"></a>
-<span class="sourceLineNo">130</span>  public Cell get() {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    return ptSearcher.current();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  }<a name="line.132"></a>
-<span class="sourceLineNo">133</span><a name="line.133"></a>
-<span class="sourceLineNo">134</span>  @Override<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  public void rewind() {<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    ptSearcher.positionAtFirstCell();<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  @Override<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  public boolean next() {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    return ptSearcher.advance();<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
-<span class="sourceLineNo">143</span><a name="line.143"></a>
-<span class="sourceLineNo">144</span>  public boolean advance() {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    return ptSearcher.advance();<a name="line.145"></a>
-<span class="sourceLineNo">146</span>  }<a name="line.146"></a>
+<span class="sourceLineNo">083</span>  @Override<a name="line.83"></a>
+<span class="sourceLineNo">084</span>  public ByteBuffer getValueShallowCopy() {<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    return CellUtil.getValueBufferShallowCopy(ptSearcher.current());<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  }<a name="line.86"></a>
+<span class="sourceLineNo">087</span><a name="line.87"></a>
+<span class="sourceLineNo">088</span>  /**<a name="line.88"></a>
+<span class="sourceLineNo">089</span>   * currently must do deep copy into new array<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>  @Override<a name="line.91"></a>
+<span class="sourceLineNo">092</span>  public Cell getCell() {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    // The PrefixTreecell is of type BytebufferedCell and the value part of the cell<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    // determines whether we are offheap cell or onheap cell.  All other parts of the cell-<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    // row, fam and col are all represented as onheap byte[]<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    ByteBufferedCell cell = (ByteBufferedCell)ptSearcher.current();<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    if (cell == null) {<a name="line.97"></a>
+<span class="sourceLineNo">098</span>      return null;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    }<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    // Use the ByteBuffered cell to see if the Cell is onheap or offheap<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    if (cell.getValueByteBuffer().hasArray()) {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>      return new OnheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.102"></a>
+<span class="sourceLineNo">103</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.103"></a>
+<span class="sourceLineNo">104</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.104"></a>
+<span class="sourceLineNo">105</span>          cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),<a name="line.105"></a>
+<span class="sourceLineNo">106</span>          cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),<a name="line.106"></a>
+<span class="sourceLineNo">107</span>          cell.getSequenceId());<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    } else {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      return new OffheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.109"></a>
+<span class="sourceLineNo">110</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.110"></a>
+<span class="sourceLineNo">111</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.111"></a>
+<span class="sourceLineNo">112</span>          cell.getValueByteBuffer(), cell.getValuePosition(), cell.getValueLength(),<a name="line.112"></a>
+<span class="sourceLineNo">113</span>          cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(),<a name="line.113"></a>
+<span class="sourceLineNo">114</span>          cell.getTypeByte(), cell.getSequenceId());<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * &lt;p&gt;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * Currently unused.<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   * &lt;/p&gt;&lt;p&gt;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * A nice, lightweight reference, though the underlying cell is transient. This method may return<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * the same reference to the backing PrefixTreeCell repeatedly, while other implementations may<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   * return a different reference for each Cell.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>   * &lt;/p&gt;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>   * The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to<a name="line.126"></a>
+<span class="sourceLineNo">127</span>   * use this method instead of the getKeyValue() methods above.<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   */<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  public Cell get() {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    return ptSearcher.current();<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  }<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>  @Override<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public void rewind() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    ptSearcher.positionAtFirstCell();<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  }<a name="line.136"></a>
+<span class="sourceLineNo">137</span><a name="line.137"></a>
+<span class="sourceLineNo">138</span>  @Override<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  public boolean next() {<a name="line.139"></a>
+<span class="sourceLineNo">140</span>    return ptSearcher.advance();<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  }<a name="line.141"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>  public boolean advance() {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    return ptSearcher.advance();<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
+<span class="sourceLineNo">146</span><a name="line.146"></a>
 <span class="sourceLineNo">147</span><a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  private static final boolean USE_POSITION_BEFORE = false;<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>  /*<a name="line.151"></a>
-<span class="sourceLineNo">152</span>   * Support both of these options since the underlying PrefixTree supports<a name="line.152"></a>
-<span class="sourceLineNo">153</span>   * both. Possibly expand the EncodedSeeker to utilize them both.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>   */<a name="line.154"></a>
-<span class="sourceLineNo">155</span><a name="line.155"></a>
-<span class="sourceLineNo">156</span>  protected int seekToOrBeforeUsingPositionAtOrBefore(Cell kv, boolean seekBefore) {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    // this does a deep copy of the key byte[] because the CellSearcher<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    // interface wants a Cell<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    CellScannerPosition position = ptSearcher.seekForwardToOrBefore(kv);<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>    if (CellScannerPosition.AT == position) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      if (seekBefore) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        ptSearcher.previous();<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        return 1;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      }<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      return 0;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    }<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    return 1;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  }<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  protected int seekToOrBeforeUsingPositionAtOrAfter(Cell kv, boolean seekBefore) {<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    // should probably switch this to use the seekForwardToOrBefore method<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    CellScannerPosition position = ptSearcher.seekForwardToOrAfter(kv);<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>    if (CellScannerPosition.AT == position) {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      if (seekBefore) {<a name="line.177"></a>
-<span class="sourceLineNo">178</span>        ptSearcher.previous();<a name="line.178"></a>
-<span class="sourceLineNo">179</span>        return 1;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      }<a name="line.180"></a>
-<span class="sourceLineNo">181</span>      return 0;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>    if (CellScannerPosition.AFTER == position) {<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      if (!ptSearcher.isBeforeFirst()) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>        ptSearcher.previous();<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      }<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      return 1;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    }<a name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>    if (position == CellScannerPosition.AFTER_LAST) {<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      if (seekBefore) {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        ptSearcher.previous();<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      }<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      return 1;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    }<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>    throw new RuntimeException("unexpected CellScannerPosition:" + position);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>  }<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>  @Override<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  public int seekToKeyInBlock(Cell key, boolean forceBeforeOnExactMatch) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    if (USE_POSITION_BEFORE) {<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      return seekToOrBeforeUsingPositionAtOrBefore(key, forceBeforeOnExactMatch);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    } else {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      return seekToOrBeforeUsingPositionAtOrAfter(key, forceBeforeOnExactMatch);<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    }<a name="line.208"></a>
-<span class="sourceLineNo">209</span>  }<a name="line.209"></a>
-<span class="sourceLineNo">210</span><a name="line.210"></a>
-<span class="sourceLineNo">211</span>  @Override<a name="line.211"></a>
-<span class="sourceLineNo">212</span>  public int compareKey(CellComparator comparator, Cell key) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    return comparator.compare(key,<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        ptSearcher.current());<a name="line.214"></a>
-<span class="sourceLineNo">215</span>  }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  /**<a name="line.217"></a>
-<span class="sourceLineNo">218</span>   * Cloned version of the PrefixTreeCell where except the value part, the rest<a name="line.218"></a>
-<span class="sourceLineNo">219</span>   * of the key part is deep copied<a name="line.219"></a>
-<span class="sourceLineNo">220</span>   *<a name="line.220"></a>
-<span class="sourceLineNo">221</span>   */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static class OnheapPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.224"></a>
-<span class="sourceLineNo">225</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY));<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    private byte[] row;<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    private short rowLength;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    private byte[] fam;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    private byte famLength;<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    private byte[] qual;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    private int qualLength;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    private byte[] val;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    private int valOffset;<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    private int valLength;<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    private byte[] tag;<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    private int tagsLength;<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    private long ts;<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    private long seqId;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    private byte type;<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>    public OnheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.241"></a>
-<span class="sourceLineNo">242</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, byte[] val,<a name="line.242"></a>
-<span class="sourceLineNo">243</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        long seqId) {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      this.row = new byte[rowLength];<a name="line.245"></a>
-<span class="sourceLineNo">246</span>      System.arraycopy(row, rowOffset, this.row, 0, rowLength);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      this.rowLength = rowLength;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      this.fam = new byte[famLength];<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      System.arraycopy(fam, famOffset, this.fam, 0, famLength);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      this.famLength = famLength;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>      this.qual = new byte[qualLength];<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      System.arraycopy(qual, qualOffset, this.qual, 0, qualLength);<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      this.qualLength = qualLength;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      this.tag = new byte[tagLength];<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      System.arraycopy(tag, tagOffset, this.tag, 0, tagLength);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      this.tagsLength = tagLength;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      this.val = val;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      this.valLength = valLength;<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      this.valOffset = valOffset;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      this.ts = ts;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      this.seqId = seqId;<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      this.type = type;<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    }<a name="line.263"></a>
-<span class="sourceLineNo">264</span><a name="line.264"></a>
-<span class="sourceLineNo">265</span>    @Override<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    public void setSequenceId(long seqId) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      this.seqId = seqId;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    @Override<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    public byte[] getRowArray() {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      return this.row;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    }<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>    @Override<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    public int getRowOffset() {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      return 0;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    }<a name="line.278"></a>
-<span class="sourceLineNo">279</span><a name="line.279"></a>
-<span class="sourceLineNo">280</span>    @Override<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    public short getRowLength() {<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      return this.rowLength;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span><a name="line.284"></a>
-<span class="sourceLineNo">285</span>    @Override<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    public byte[] getFamilyArray() {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>      return this.fam;<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>    @Override<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    public int getFamilyOffset() {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return 0;<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
-<span class="sourceLineNo">295</span>    @Override<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    public byte getFamilyLength() {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      return this.famLength;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    }<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    @Override<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    public byte[] getQualifierArray() {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      return this.qual;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    }<a name="line.303"></a>
-<span class="sourceLineNo">304</span><a name="line.304"></a>
-<span class="sourceLineNo">305</span>    @Override<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    public int getQualifierOffset() {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      return 0;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    }<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>    @Override<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    public int getQualifierLength() {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      return this.qualLength;<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>    @Override<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    public long getTimestamp() {<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      return ts;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    }<a name="line.318"></a>
-<span class="sourceLineNo">319</span><a name="line.319"></a>
-<span class="sourceLineNo">320</span>    @Override<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    public byte getTypeByte() {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      return type;<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    }<a name="line.323"></a>
-<span class="sourceLineNo">324</span><a name="line.324"></a>
-<span class="sourceLineNo">325</span>    @Override<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    public long getSequenceId() {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      return seqId;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    }<a name="line.328"></a>
-<span class="sourceLineNo">329</span><a name="line.329"></a>
-<span class="sourceLineNo">330</span>    @Override<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    public byte[] getValueArray() {<a name="line.331"></a>
-<span class="sourceLineNo">332</span>      return val;<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    }<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>    @Override<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    public int getValueOffset() {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>      return this.valOffset;<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    }<a name="line.338"></a>
-<span class="sourceLineNo">339</span><a name="line.339"></a>
-<span class="sourceLineNo">340</span>    @Override<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    public int getValueLength() {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>      return this.valLength;<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    }<a name="line.343"></a>
-<span class="sourceLineNo">344</span><a name="line.344"></a>
-<span class="sourceLineNo">345</span>    @Override<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    public byte[] getTagsArray() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      return this.tag;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    }<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>    @Override<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    public int getTagsOffset() {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return 0;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span><a name="line.354"></a>
-<span class="sourceLineNo">355</span>    @Override<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    public int getTagsLength() {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      return this.tagsLength;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>    @Override<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    public String toString() {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),<a name="line.364"></a>
-<span class="sourceLineNo">365</span>          getQualifierLength());<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      String timestamp = String.valueOf((getTimestamp()));<a name="line.366"></a>
-<span class="sourceLineNo">367</span>      return row + "/" + family + (family != null &amp;&amp; family.length() &gt; 0 ? ":" : "") + qualifier<a name="line.367"></a>
-<span class="sourceLineNo">368</span>          + "/" + timestamp + "/" + Type.codeToType(type);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>    @Override<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    public long heapSize() {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    }<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  private static class OffheapPrefixTreeCell extends ByteBufferedCell implements Cell,<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      SettableSequenceId, HeapSize {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.379"></a>
-<span class="sourceLineNo">380</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.380"></a>
-<span class="sourceLineNo">381</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.BYTE_BUFFER));<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    private ByteBuffer rowBuff;<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    private short rowLength;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>    private ByteBuffer famBuff;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    private byte famLength;<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    private ByteBuffer qualBuff;<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    private int qualLength;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    private ByteBuffer val;<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    private int valOffset;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private int valLength;<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    private ByteBuffer tagBuff;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    private int tagsLength;<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    private long ts;<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private long seqId;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    private byte type;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    public OffheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, ByteBuffer val,<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.398"></a>
-<span class="sourceLineNo">399</span>        long seqId) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>      byte[] tmpRow = new byte[rowLength];<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      System.arraycopy(row, rowOffset, tmpRow, 0, rowLength);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      this.rowBuff = ByteBuffer.wrap(tmpRow);<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      this.rowLength = rowLength;<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      byte[] tmpFam = new byte[famLength];<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      System.arraycopy(fam, famOffset, tmpFam, 0, famLength);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      this.famBuff = ByteBuffer.wrap(tmpFam);<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      this.famLength = famLength;<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      byte[] tmpQual = new byte[qualLength];<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      System.arraycopy(qual, qualOffset, tmpQual, 0, qualLength);<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      this.qualBuff = ByteBuffer.wrap(tmpQual);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      this.qualLength = qualLength;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>      byte[] tmpTag = new byte[tagLength];<a name="line.412"></a>
-<span class="sourceLineNo">413</span>      System.arraycopy(tag, tagOffset, tmpTag, 0, tagLength);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      this.tagBuff = ByteBuffer.wrap(tmpTag);<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      this.tagsLength = tagLength;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>      this.val = val;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      this.valLength = valLength;<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      this.valOffset = valOffset;<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      this.ts = ts;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>      this.seqId = seqId;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      this.type = type;<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    <a name="line.423"></a>
-<span class="sourceLineNo">424</span>    @Override<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    public void setSequenceId(long seqId) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>      this.seqId = seqId;<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    }<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>    @Override<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    public byte[] getRowArray() {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      return this.rowBuff.array();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    public int getRowOffset() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      return getRowPosition();<a name="line.436"></a>
-<span class="sourceLineNo">437</span>    }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>    @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    public short getRowLength() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>      return this.rowLength;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>    @Override<a name="line.444"></a>
-<span class="sourceLineNo">445</span>    public byte[] getFamilyArray() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      return this.famBuff.array();<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    }<a name="line.447"></a>
-<span class="sourceLineNo">448</span><a name="line.448"></a>
-<span class="sourceLineNo">449</span>    @Override<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    public int getFamilyOffset() {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      return getFamilyPosition();<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    }<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>    @Override<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    public byte getFamilyLength() {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>      return this.famLength;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    @Override<a name="line.459"></a>
-<span class="sourceLineNo">460</span>    public byte[] getQualifierArray() {<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      return this.qualBuff.array();<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    }<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>    @Override<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    public int getQualifierOffset() {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      return getQualifierPosition();<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
-<span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>    @Override<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    public int getQualifierLength() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      return this.qualLength;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>    @Override<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    public long getTimestamp() {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>      return ts;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    }<a name="line.477"></a>
-<span class="sourceLineNo">478</span><a name="line.478"></a>
-<span class="sourceLineNo">479</span>    @Override<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    public byte getTypeByte() {<a name="line.480"></a>
-<span class="sourceLineNo">481</span>      return type;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    }<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>    @Override<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    public long getSequenceId() {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      return seqId;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>    @Override<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    public byte[] getValueArray() {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      byte[] tmpVal = new byte[valLength];<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      ByteBufferUtils.copyFromBufferToArray(tmpVal, val, valOffset, 0, valLength);<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      return tmpVal;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    }<a name="line.494"></a>
-<span class="sourceLineNo">495</span><a name="line.495"></a>
-<span class="sourceLineNo">496</span>    @Override<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    public int getValueOffset() {<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      return 0;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    }<a name="line.499"></a>
-<span class="sourceLineNo">500</span><a name="line.500"></a>
-<span class="sourceLineNo">501</span>    @Override<a name="line.501"></a>
-<span class="sourceLineNo">502</span>    public int getValueLength() {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      return this.valLength;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    }<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>    @Override<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    public byte[] getTagsArray() {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      return this.tagBuff.array();<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    @Override<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    public int getTagsOffset() {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return getTagsPosition();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    @Override<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    public int getTagsLength() {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      return this.tagsLength;<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>    <a name="line.520"></a>
-<span class="sourceLineNo">521</span>    @Override<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    public ByteBuffer getRowByteBuffer() {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      return this.rowBuff;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    <a name="line.525"></a>
-<span class="sourceLineNo">526</span>    @Override<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    public int getRowPosition() {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return 0;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    <a name="line.530"></a>
-<span class="sourceLineNo">531</span>    @Override<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    public ByteBuffer getFamilyByteBuffer() {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      return this.famBuff;<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    }<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    <a name="line.535"></a>
-<span class="sourceLineNo">536</span>    @Override<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    public int getFamilyPosition() {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      return 0;<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    }<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    <a name="line.540"></a>
-<span class="sourceLineNo">541</span>    @Override<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    public ByteBuffer getQualifierByteBuffer() {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      return this.qualBuff;<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    }<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    @Override<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    public int getQualifierPosition() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      return 0;<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    @Override<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    public ByteBuffer getTagsByteBuffer() {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      return this.tagBuff;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    }<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    @Override<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    public int getTagsPosition() {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      return 0;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    @Override<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    public ByteBuffer getValueByteBuffer() {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      return this.val;<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>    @Override<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    public int getValuePosition() {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      return this.valOffset;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    @Override<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    public long heapSize() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>    @Override<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    public String toString() {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),<a name="line.580"></a>
-<span class="sourceLineNo">581</span>          getQualifierLength());<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      String timestamp = String.valueOf((getTimestamp()));<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      return row + "/" + family + (family != null &amp;&amp; family.length() &gt; 0 ? ":" : "") + qualifier<a name="line.583"></a>
-<span class="sourceLineNo">584</span>          + "/" + timestamp + "/" + Type.codeToType(type);<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    }<a name="line.585"></a>
-<span class="sourceLineNo">586</span>  }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>}<a name="line.587"></a>
+<span class="sourceLineNo">148</span>  private static final boolean USE_POSITION_BEFORE = false;<a name="line.148"></a>
+<span class="sourceLineNo">149</span><a name="line.149"></a>
+<span class="sourceLineNo">150</span>  /*<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   * Support both of these options since the underlying PrefixTree supports<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   * both. Possibly expand the EncodedSeeker to utilize them both.<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   */<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  protected int seekToOrBeforeUsingPositionAtOrBefore(Cell kv, boolean seekBefore) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>    // this does a deep copy of the key byte[] because the CellSearcher<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    // interface wants a Cell<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    CellScannerPosition position = ptSearcher.seekForwardToOrBefore(kv);<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>    if (CellScannerPosition.AT == position) {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      if (seekBefore) {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>        ptSearcher.previous();<a name="line.162"></a>
+<span class="sourceLineNo">163</span>        return 1;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      }<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      return 0;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
+<span class="sourceLineNo">167</span><a name="line.167"></a>
+<span class="sourceLineNo">168</span>    return 1;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  }<a name="line.169"></a>
+<span class="sourceLineNo">170</span><a name="line.170"></a>
+<span class="sourceLineNo">171</span>  protected int seekToOrBeforeUsingPositionAtOrAfter(Cell kv, boolean seekBefore) {<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    // should probably switch this to use the seekForwardToOrBefore method<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    CellScannerPosition position = ptSearcher.seekForwardToOrAfter(kv);<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>    if (CellScannerPosition.AT == position) {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      if (seekBefore) {<a name="line.176"></a>
+<span class="sourceLineNo">177</span>        ptSearcher.previous();<a name="line.177"></a>
+<span class="sourceLineNo">178</span>        return 1;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      }<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      return 0;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
+<span class="sourceLineNo">183</span><a name="line.183"></a>
+<span class="sourceLineNo">184</span>    if (CellScannerPosition.AFTER == position) {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      if (!ptSearcher.isBeforeFirst()) {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>        ptSearcher.previous();<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      }<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      return 1;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    }<a name="line.189"></a>
+<span class="sourceLineNo">190</span><a name="line.190"></a>
+<span class="sourceLineNo">191</span>    if (position == CellScannerPosition.AFTER_LAST) {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      if (seekBefore) {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>        ptSearcher.previous();<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      }<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      return 1;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>    throw new RuntimeException("unexpected CellScannerPosition:" + position);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  }<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>  @Override<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  public int seekToKeyInBlock(Cell key, boolean forceBeforeOnExactMatch) {<a name="line.202"></a>
+<span class="sourceLineNo">203</span>    if (USE_POSITION_BEFORE) {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      return seekToOrBeforeUsingPositionAtOrBefore(key, forceBeforeOnExactMatch);<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    } else {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      return seekToOrBeforeUsingPositionAtOrAfter(key, forceBeforeOnExactMatch);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>  }<a name="line.208"></a>
+<span class="sourceLineNo">209</span><a name="line.209"></a>
+<span class="sourceLineNo">210</span>  @Override<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  public int compareKey(CellComparator comparator, Cell key) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>    return comparator.compare(key,<a name="line.212"></a>
+<span class="sourceLineNo">213</span>        ptSearcher.current());<a name="line.213"></a>
+<span class="sourceLineNo">214</span>  }<a name="line.214"></a>
+<span class="sourceLineNo">215</span><a name="line.215"></a>
+<span class="sourceLineNo">216</span>  /**<a name="line.216"></a>
+<span class="sourceLineNo">217</span>   * Cloned version of the PrefixTreeCell where except the value part, the rest<a name="line.217"></a>
+<span class="sourceLineNo">218</span>   * of the key part is deep copied<a name="line.218"></a>
+<span class="sourceLineNo">219</span>   *<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   */<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static class OnheapPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    private byte[] row;<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    private short rowLength;<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    private byte[] fam;<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    private byte famLength;<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    private byte[] qual;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    private int qualLength;<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    private byte[] val;<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    private int valOffset;<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    private int valLength;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    private byte[] tag;<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    private int tagsLength;<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    private long ts;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    private long seqId;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    private byte type;<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>    public OnheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.240"></a>
+<span class="sourceLineNo">241</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, byte[] val,<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.242"></a>
+<span class="sourceLineNo">243</span>        long seqId) {<a name="line.243"></a>
+<span class="sourceLineNo">244</span>      this.row = new byte[rowLength];<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      System.arraycopy(row, rowOffset, this.row, 0, rowLength);<a name="line.245"></a>
+<span class="sourceLineNo">246</span>      this.rowLength = rowLength;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>      this.fam = new byte[famLength];<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      System.arraycopy(fam, famOffset, this.fam, 0, famLength);<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      this.famLength = famLength;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      this.qual = new byte[qualLength];<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      System.arraycopy(qual, qualOffset, this.qual, 0, qualLength);<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      this.qualLength = qualLength;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      this.tag = new byte[tagLength];<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      System.arraycopy(tag, tagOffset, this.tag, 0, tagLength);<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      this.tagsLength = tagLength;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      this.val = val;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      this.valLength = valLength;<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      this.valOffset = valOffset;<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      this.ts = ts;<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      this.seqId = seqId;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      this.type = type;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>    @Override<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    public void setSequenceId(long seqId) {<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      this.seqId = seqId;<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    }<a name="line.267"></a>
+<span class="sourceLineNo">268</span><a name="line.268"></a>
+<span class="sourceLineNo">269</span>    @Override<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    public byte[] getRowArray() {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>      return this.row;<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    }<a name="line.272"></a>
+<span class="sourceLineNo">273</span><a name="line.273"></a>
+<span class="sourceLineNo">274</span>    @Override<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    public int getRowOffset() {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      return 0;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    }<a name="line.277"></a>
+<span class="sourceLineNo">278</span><a name="line.278"></a>
+<span class="sourceLineNo">279</span>    @Override<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    public short getRowLength() {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      return this.rowLength;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    public byte[] getFamilyArray() {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      return this.fam;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
+<span class="sourceLineNo">288</span><a name="line.288"></a>
+<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    public int getFamilyOffset() {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      return 0;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>    @Override<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    public byte getFamilyLength() {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return this.famLength;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">299</span>    @Override<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    public byte[] getQualifierArray() {<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      return this.qual;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>    @Override<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    public int getQualifierOffset() {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      return 0;<a name="line.306"></a>
+<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span>    @Override<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    public int getQualifierLength() {<a name="line.310"></a>
+<span class="sourceLineNo">311</span>      return this.qualLength;<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
+<span class="sourceLineNo">313</span><a name="line.313"></a>
+<span class="sourceLineNo">314</span>    @Override<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    public long getTimestamp() {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>      return ts;<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>    @Override<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    public byte getTypeByte() {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      return type;<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>    @Override<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    public long getSequenceId() {<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      return seqId;<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    @Override<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    public byte[] getValueArray() {<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      return val;<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    }<a name="line.332"></a>
+<span class="sourceLineNo">333</span><a name="line.333"></a>
+<span class="sourceLineNo">334</span>    @Override<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    public int getValueOffset() {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>      return this.valOffset;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    }<a name="line.337"></a>
+<span class="sourceLineNo">338</span><a name="line.338"></a>
+<span class="sourceLineNo">339</span>    @Override<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    public int getValueLength() {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      return this.valLength;<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    }<a name="line.342"></a>
+<span class="sourceLineNo">343</span><a name="line.343"></a>
+<span class="sourceLineNo">344</span>    @Override<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    public byte[] getTagsArray() {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>      return this.tag;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    }<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>    @Override<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    public int getTagsOffset() {<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      return 0;<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    }<a name="line.352"></a>
+<span class="sourceLineNo">353</span><a name="line.353"></a>
+<span class="sourceLineNo">354</span>    @Override<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    public int getTagsLength() {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      return this.tagsLength;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span><a name="line.358"></a>
+<span class="sourceLineNo">359</span>    @Override<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    public String toString() {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());<a name="line.361"></a>
+<span class="sourceLineNo">362</span>      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),<a name="line.363"></a>
+<span class="sourceLineNo">364</span>          getQualifierLength());<a name="line.364"></a>
+<span class="sourceLineNo">365</span>      String timestamp = String.valueOf((getTimestamp()));<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      return row + "/" + family + (family != null &amp;&amp; family.length() &gt; 0 ? ":" : "") + qualifier<a name="line.366"></a>
+<span class="sourceLineNo">367</span>          + "/" + timestamp + "/" + Type.codeToType(type);<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    }<a name="line.368"></a>
+<span class="sourceLineNo">369</span><a name="line.369"></a>
+<span class="sourceLineNo">370</span>    @Override<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    public long heapSize() {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  }<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  private static class OffheapPrefixTreeCell extends ByteBufferedCell implements Cell,<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      SettableSequenceId, HeapSize {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.BYTE_BUFFER));<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    private ByteBuffer rowBuff;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    private short rowLength;<a name="line.382"></a>
+<span class="sourceLineNo">383</span>    private ByteBuffer famBuff;<a name="line.383"></a>
+<span class="sourceLineNo">384</span>    private byte famLength;<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    private ByteBuffer qualBuff;<a name="line.385"></a>
+<span class="sourceLineNo">386</span>    private int qualLength;<a name="line.386"></a>
+<span class="sourceLineNo">387</span>    private ByteBuffer val;<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    private int valOffset;<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    private int valLength;<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    private ByteBuffer tagBuff;<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    private int tagsLength;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>    private long ts;<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    private long seqId;<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    private byte type;<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    public OffheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.395"></a>
+<span class="sourceLineNo">396</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, ByteBuffer val,<a name="line.396"></a>
+<span class="sourceLineNo">397</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        long seqId) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      byte[] tmpRow = new byte[rowLength];<a name="line.399"></a>
+<span class="sourceLineNo">400</span>      System.arraycopy(row, rowOffset, tmpRow, 0, rowLength);<a name="line.400"></a>
+<span class="sourceLineNo">401</span>      this.rowBuff = ByteBuffer.wrap(tmpRow);<a name="line.401"></a>
+<span class="sourceLineNo">402</span>      this.rowLength = rowLength;<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      byte[] tmpFam = new byte[famLength];<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      System.arraycopy(fam, famOffset, tmpFam, 0, famLength);<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      this.famBuff = ByteBuffer.wrap(tmpFam);<a name="line.405"></a>
+<span class="sourceLineNo">406</span>      this.famLength = famLength;<a name="line.406"></a>
+<span class="sourceLineNo">407</span>      byte[] tmpQual = new byte[qualLength];<a name="line.407"></a>
+<span class="sourceLineNo">408</span>      System.arraycopy(qual, qualOffset, tmpQual, 0, qualLength);<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      this.qualBuff = ByteBuffer.wrap(tmpQual);<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      this.qualLength = qualLength;<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      byte[] tmpTag = new byte[tagLength];<a name="line.411"></a>
+<span class="sourceLineNo">412</span>      System.arraycopy(tag, tagOffset, tmpTag, 0, tagLength);<a name="line.412"></a>
+<span class="sourceLineNo">413</span>      this.tagBuff = ByteBuffer.wrap(tmpTag);<a name="line.413"></a>
+<span class="sourceLineNo">414</span>      this.tagsLength = tagLength;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>      this.val = val;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>      this.valLength = valLength;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      this.valOffset = valOffset;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>      this.ts = ts;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      this.seqId = seqId;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>      this.type = type;<a name="line.420"></a>
+<span class="sourceLineNo">421</span>    }<a name="line.421"></a>
+<span class="sourceLineNo">422</span>    <a name="line.422"></a>
+<span class="sourceLineNo">423</span>    @Override<a name="line.423"></a>
+<span class="sourceLineNo">424</span>    public void setSequenceId(long seqId) {<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      this.seqId = seqId;<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span><a name="line.427"></a>
+<span class="sourceLineNo">428</span>    @Override<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    public byte[] getRowArray() {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      return this.rowBuff.array();<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>    @Override<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    public int getRowOffset() {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      return getRowPosition();<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    }<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>    @Override<a name="line.438"></a>
+<span class="sourceLineNo">439</span>    public short getRowLength() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>      return this.rowLength;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>    @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>    public byte[] getFamilyArray() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>      return this.famBuff.array();<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>    @Override<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    public int getFamilyOffset() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      return getFamilyPosition();<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    }<a name="line.451"></a>
+<span class="sourceLineNo">452</span><a name="line.452"></a>
+<span class="sourceL

<TRUNCATED>

[21/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html b/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html
index 8e94eb6..6e7cf22 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.OnheapPrefixTreeCell.html
@@ -33,566 +33,565 @@
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.KeyValue.Type;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.ByteBufferUtils;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.39"></a>
-<span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>/**<a name="line.41"></a>
-<span class="sourceLineNo">042</span> * These methods have the same definition as any implementation of the EncodedSeeker.<a name="line.42"></a>
-<span class="sourceLineNo">043</span> *<a name="line.43"></a>
-<span class="sourceLineNo">044</span> * In the future, the EncodedSeeker could be modified to work with the Cell interface directly.  It<a name="line.44"></a>
-<span class="sourceLineNo">045</span> * currently returns a new KeyValue object each time getKeyValue is called.  This is not horrible,<a name="line.45"></a>
-<span class="sourceLineNo">046</span> * but in order to create a new KeyValue object, we must first allocate a new byte[] and copy in<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * the data from the PrefixTreeCell.  It is somewhat heavyweight right now.<a name="line.47"></a>
-<span class="sourceLineNo">048</span> */<a name="line.48"></a>
-<span class="sourceLineNo">049</span>@InterfaceAudience.Private<a name="line.49"></a>
-<span class="sourceLineNo">050</span>public class PrefixTreeSeeker implements EncodedSeeker {<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>  protected ByteBuffer block;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>  protected boolean includeMvccVersion;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  protected PrefixTreeArraySearcher ptSearcher;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  public PrefixTreeSeeker(boolean includeMvccVersion) {<a name="line.56"></a>
-<span class="sourceLineNo">057</span>    this.includeMvccVersion = includeMvccVersion;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  }<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  @Override<a name="line.60"></a>
-<span class="sourceLineNo">061</span>  public void setCurrentBuffer(ByteBuff fullBlockBuffer) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>    ptSearcher = DecoderFactory.checkOut(fullBlockBuffer, includeMvccVersion);<a name="line.62"></a>
-<span class="sourceLineNo">063</span>    rewind();<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  }<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>  /**<a name="line.66"></a>
-<span class="sourceLineNo">067</span>   * &lt;p&gt;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>   * Currently unused.<a name="line.68"></a>
-<span class="sourceLineNo">069</span>   * &lt;/p&gt;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>   * TODO performance leak. should reuse the searchers. hbase does not currently have a hook where<a name="line.70"></a>
-<span class="sourceLineNo">071</span>   * this can be called<a name="line.71"></a>
-<span class="sourceLineNo">072</span>   */<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  public void releaseCurrentSearcher(){<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    DecoderFactory.checkIn(ptSearcher);<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  }<a name="line.75"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.util.ByteBufferUtils;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.38"></a>
+<span class="sourceLineNo">039</span><a name="line.39"></a>
+<span class="sourceLineNo">040</span>/**<a name="line.40"></a>
+<span class="sourceLineNo">041</span> * These methods have the same definition as any implementation of the EncodedSeeker.<a name="line.41"></a>
+<span class="sourceLineNo">042</span> *<a name="line.42"></a>
+<span class="sourceLineNo">043</span> * In the future, the EncodedSeeker could be modified to work with the Cell interface directly.  It<a name="line.43"></a>
+<span class="sourceLineNo">044</span> * currently returns a new KeyValue object each time getKeyValue is called.  This is not horrible,<a name="line.44"></a>
+<span class="sourceLineNo">045</span> * but in order to create a new KeyValue object, we must first allocate a new byte[] and copy in<a name="line.45"></a>
+<span class="sourceLineNo">046</span> * the data from the PrefixTreeCell.  It is somewhat heavyweight right now.<a name="line.46"></a>
+<span class="sourceLineNo">047</span> */<a name="line.47"></a>
+<span class="sourceLineNo">048</span>@InterfaceAudience.Private<a name="line.48"></a>
+<span class="sourceLineNo">049</span>public class PrefixTreeSeeker implements EncodedSeeker {<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>  protected ByteBuffer block;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  protected boolean includeMvccVersion;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>  protected PrefixTreeArraySearcher ptSearcher;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>  public PrefixTreeSeeker(boolean includeMvccVersion) {<a name="line.55"></a>
+<span class="sourceLineNo">056</span>    this.includeMvccVersion = includeMvccVersion;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>  }<a name="line.57"></a>
+<span class="sourceLineNo">058</span><a name="line.58"></a>
+<span class="sourceLineNo">059</span>  @Override<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  public void setCurrentBuffer(ByteBuff fullBlockBuffer) {<a name="line.60"></a>
+<span class="sourceLineNo">061</span>    ptSearcher = DecoderFactory.checkOut(fullBlockBuffer, includeMvccVersion);<a name="line.61"></a>
+<span class="sourceLineNo">062</span>    rewind();<a name="line.62"></a>
+<span class="sourceLineNo">063</span>  }<a name="line.63"></a>
+<span class="sourceLineNo">064</span><a name="line.64"></a>
+<span class="sourceLineNo">065</span>  /**<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   * &lt;p&gt;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   * Currently unused.<a name="line.67"></a>
+<span class="sourceLineNo">068</span>   * &lt;/p&gt;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>   * TODO performance leak. should reuse the searchers. hbase does not currently have a hook where<a name="line.69"></a>
+<span class="sourceLineNo">070</span>   * this can be called<a name="line.70"></a>
+<span class="sourceLineNo">071</span>   */<a name="line.71"></a>
+<span class="sourceLineNo">072</span>  public void releaseCurrentSearcher(){<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    DecoderFactory.checkIn(ptSearcher);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  }<a name="line.74"></a>
+<span class="sourceLineNo">075</span><a name="line.75"></a>
 <span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  @Override<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  public Cell getKey() {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>    return ptSearcher.current();<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  }<a name="line.81"></a>
+<span class="sourceLineNo">077</span>  @Override<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  public Cell getKey() {<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    return ptSearcher.current();<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
+<span class="sourceLineNo">081</span><a name="line.81"></a>
 <span class="sourceLineNo">082</span><a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  @Override<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public ByteBuffer getValueShallowCopy() {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    return CellUtil.getValueBufferShallowCopy(ptSearcher.current());<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  }<a name="line.87"></a>
-<span class="sourceLineNo">088</span><a name="line.88"></a>
-<span class="sourceLineNo">089</span>  /**<a name="line.89"></a>
-<span class="sourceLineNo">090</span>   * currently must do deep copy into new array<a name="line.90"></a>
-<span class="sourceLineNo">091</span>   */<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  @Override<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public Cell getCell() {<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    // The PrefixTreecell is of type BytebufferedCell and the value part of the cell<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    // determines whether we are offheap cell or onheap cell.  All other parts of the cell-<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    // row, fam and col are all represented as onheap byte[]<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    ByteBufferedCell cell = (ByteBufferedCell)ptSearcher.current();<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    if (cell == null) {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      return null;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    }<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    // Use the ByteBuffered cell to see if the Cell is onheap or offheap<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    if (cell.getValueByteBuffer().hasArray()) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>      return new OnheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.103"></a>
-<span class="sourceLineNo">104</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.104"></a>
-<span class="sourceLineNo">105</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.105"></a>
-<span class="sourceLineNo">106</span>          cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),<a name="line.106"></a>
-<span class="sourceLineNo">107</span>          cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),<a name="line.107"></a>
-<span class="sourceLineNo">108</span>          cell.getSequenceId());<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    } else {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      return new OffheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.110"></a>
-<span class="sourceLineNo">111</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.111"></a>
-<span class="sourceLineNo">112</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.112"></a>
-<span class="sourceLineNo">113</span>          cell.getValueByteBuffer(), cell.getValuePosition(), cell.getValueLength(),<a name="line.113"></a>
-<span class="sourceLineNo">114</span>          cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(),<a name="line.114"></a>
-<span class="sourceLineNo">115</span>          cell.getTypeByte(), cell.getSequenceId());<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    }<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  }<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  /**<a name="line.119"></a>
-<span class="sourceLineNo">120</span>   * &lt;p&gt;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>   * Currently unused.<a name="line.121"></a>
-<span class="sourceLineNo">122</span>   * &lt;/p&gt;&lt;p&gt;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>   * A nice, lightweight reference, though the underlying cell is transient. This method may return<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   * the same reference to the backing PrefixTreeCell repeatedly, while other implementations may<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   * return a different reference for each Cell.<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * &lt;/p&gt;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   * The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to<a name="line.127"></a>
-<span class="sourceLineNo">128</span>   * use this method instead of the getKeyValue() methods above.<a name="line.128"></a>
-<span class="sourceLineNo">129</span>   */<a name="line.129"></a>
-<span class="sourceLineNo">130</span>  public Cell get() {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    return ptSearcher.current();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  }<a name="line.132"></a>
-<span class="sourceLineNo">133</span><a name="line.133"></a>
-<span class="sourceLineNo">134</span>  @Override<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  public void rewind() {<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    ptSearcher.positionAtFirstCell();<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  @Override<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  public boolean next() {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    return ptSearcher.advance();<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
-<span class="sourceLineNo">143</span><a name="line.143"></a>
-<span class="sourceLineNo">144</span>  public boolean advance() {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    return ptSearcher.advance();<a name="line.145"></a>
-<span class="sourceLineNo">146</span>  }<a name="line.146"></a>
+<span class="sourceLineNo">083</span>  @Override<a name="line.83"></a>
+<span class="sourceLineNo">084</span>  public ByteBuffer getValueShallowCopy() {<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    return CellUtil.getValueBufferShallowCopy(ptSearcher.current());<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  }<a name="line.86"></a>
+<span class="sourceLineNo">087</span><a name="line.87"></a>
+<span class="sourceLineNo">088</span>  /**<a name="line.88"></a>
+<span class="sourceLineNo">089</span>   * currently must do deep copy into new array<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>  @Override<a name="line.91"></a>
+<span class="sourceLineNo">092</span>  public Cell getCell() {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    // The PrefixTreecell is of type BytebufferedCell and the value part of the cell<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    // determines whether we are offheap cell or onheap cell.  All other parts of the cell-<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    // row, fam and col are all represented as onheap byte[]<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    ByteBufferedCell cell = (ByteBufferedCell)ptSearcher.current();<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    if (cell == null) {<a name="line.97"></a>
+<span class="sourceLineNo">098</span>      return null;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    }<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    // Use the ByteBuffered cell to see if the Cell is onheap or offheap<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    if (cell.getValueByteBuffer().hasArray()) {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>      return new OnheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.102"></a>
+<span class="sourceLineNo">103</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.103"></a>
+<span class="sourceLineNo">104</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.104"></a>
+<span class="sourceLineNo">105</span>          cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),<a name="line.105"></a>
+<span class="sourceLineNo">106</span>          cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),<a name="line.106"></a>
+<span class="sourceLineNo">107</span>          cell.getSequenceId());<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    } else {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      return new OffheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),<a name="line.109"></a>
+<span class="sourceLineNo">110</span>          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),<a name="line.110"></a>
+<span class="sourceLineNo">111</span>          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),<a name="line.111"></a>
+<span class="sourceLineNo">112</span>          cell.getValueByteBuffer(), cell.getValuePosition(), cell.getValueLength(),<a name="line.112"></a>
+<span class="sourceLineNo">113</span>          cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(),<a name="line.113"></a>
+<span class="sourceLineNo">114</span>          cell.getTypeByte(), cell.getSequenceId());<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * &lt;p&gt;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * Currently unused.<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   * &lt;/p&gt;&lt;p&gt;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * A nice, lightweight reference, though the underlying cell is transient. This method may return<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * the same reference to the backing PrefixTreeCell repeatedly, while other implementations may<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   * return a different reference for each Cell.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>   * &lt;/p&gt;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>   * The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to<a name="line.126"></a>
+<span class="sourceLineNo">127</span>   * use this method instead of the getKeyValue() methods above.<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   */<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  public Cell get() {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    return ptSearcher.current();<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  }<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>  @Override<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public void rewind() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    ptSearcher.positionAtFirstCell();<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  }<a name="line.136"></a>
+<span class="sourceLineNo">137</span><a name="line.137"></a>
+<span class="sourceLineNo">138</span>  @Override<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  public boolean next() {<a name="line.139"></a>
+<span class="sourceLineNo">140</span>    return ptSearcher.advance();<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  }<a name="line.141"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>  public boolean advance() {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    return ptSearcher.advance();<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
+<span class="sourceLineNo">146</span><a name="line.146"></a>
 <span class="sourceLineNo">147</span><a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  private static final boolean USE_POSITION_BEFORE = false;<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>  /*<a name="line.151"></a>
-<span class="sourceLineNo">152</span>   * Support both of these options since the underlying PrefixTree supports<a name="line.152"></a>
-<span class="sourceLineNo">153</span>   * both. Possibly expand the EncodedSeeker to utilize them both.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>   */<a name="line.154"></a>
-<span class="sourceLineNo">155</span><a name="line.155"></a>
-<span class="sourceLineNo">156</span>  protected int seekToOrBeforeUsingPositionAtOrBefore(Cell kv, boolean seekBefore) {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    // this does a deep copy of the key byte[] because the CellSearcher<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    // interface wants a Cell<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    CellScannerPosition position = ptSearcher.seekForwardToOrBefore(kv);<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>    if (CellScannerPosition.AT == position) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      if (seekBefore) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        ptSearcher.previous();<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        return 1;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      }<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      return 0;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    }<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    return 1;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  }<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  protected int seekToOrBeforeUsingPositionAtOrAfter(Cell kv, boolean seekBefore) {<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    // should probably switch this to use the seekForwardToOrBefore method<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    CellScannerPosition position = ptSearcher.seekForwardToOrAfter(kv);<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>    if (CellScannerPosition.AT == position) {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      if (seekBefore) {<a name="line.177"></a>
-<span class="sourceLineNo">178</span>        ptSearcher.previous();<a name="line.178"></a>
-<span class="sourceLineNo">179</span>        return 1;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      }<a name="line.180"></a>
-<span class="sourceLineNo">181</span>      return 0;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>    if (CellScannerPosition.AFTER == position) {<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      if (!ptSearcher.isBeforeFirst()) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>        ptSearcher.previous();<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      }<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      return 1;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    }<a name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>    if (position == CellScannerPosition.AFTER_LAST) {<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      if (seekBefore) {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        ptSearcher.previous();<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      }<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      return 1;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    }<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>    throw new RuntimeException("unexpected CellScannerPosition:" + position);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>  }<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>  @Override<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  public int seekToKeyInBlock(Cell key, boolean forceBeforeOnExactMatch) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    if (USE_POSITION_BEFORE) {<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      return seekToOrBeforeUsingPositionAtOrBefore(key, forceBeforeOnExactMatch);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    } else {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      return seekToOrBeforeUsingPositionAtOrAfter(key, forceBeforeOnExactMatch);<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    }<a name="line.208"></a>
-<span class="sourceLineNo">209</span>  }<a name="line.209"></a>
-<span class="sourceLineNo">210</span><a name="line.210"></a>
-<span class="sourceLineNo">211</span>  @Override<a name="line.211"></a>
-<span class="sourceLineNo">212</span>  public int compareKey(CellComparator comparator, Cell key) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    return comparator.compare(key,<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        ptSearcher.current());<a name="line.214"></a>
-<span class="sourceLineNo">215</span>  }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  /**<a name="line.217"></a>
-<span class="sourceLineNo">218</span>   * Cloned version of the PrefixTreeCell where except the value part, the rest<a name="line.218"></a>
-<span class="sourceLineNo">219</span>   * of the key part is deep copied<a name="line.219"></a>
-<span class="sourceLineNo">220</span>   *<a name="line.220"></a>
-<span class="sourceLineNo">221</span>   */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static class OnheapPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.224"></a>
-<span class="sourceLineNo">225</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY));<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    private byte[] row;<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    private short rowLength;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    private byte[] fam;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    private byte famLength;<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    private byte[] qual;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    private int qualLength;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    private byte[] val;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    private int valOffset;<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    private int valLength;<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    private byte[] tag;<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    private int tagsLength;<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    private long ts;<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    private long seqId;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    private byte type;<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>    public OnheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.241"></a>
-<span class="sourceLineNo">242</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, byte[] val,<a name="line.242"></a>
-<span class="sourceLineNo">243</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        long seqId) {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      this.row = new byte[rowLength];<a name="line.245"></a>
-<span class="sourceLineNo">246</span>      System.arraycopy(row, rowOffset, this.row, 0, rowLength);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      this.rowLength = rowLength;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      this.fam = new byte[famLength];<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      System.arraycopy(fam, famOffset, this.fam, 0, famLength);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      this.famLength = famLength;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>      this.qual = new byte[qualLength];<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      System.arraycopy(qual, qualOffset, this.qual, 0, qualLength);<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      this.qualLength = qualLength;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      this.tag = new byte[tagLength];<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      System.arraycopy(tag, tagOffset, this.tag, 0, tagLength);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      this.tagsLength = tagLength;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      this.val = val;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      this.valLength = valLength;<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      this.valOffset = valOffset;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      this.ts = ts;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      this.seqId = seqId;<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      this.type = type;<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    }<a name="line.263"></a>
-<span class="sourceLineNo">264</span><a name="line.264"></a>
-<span class="sourceLineNo">265</span>    @Override<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    public void setSequenceId(long seqId) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      this.seqId = seqId;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    @Override<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    public byte[] getRowArray() {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      return this.row;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    }<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>    @Override<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    public int getRowOffset() {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      return 0;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    }<a name="line.278"></a>
-<span class="sourceLineNo">279</span><a name="line.279"></a>
-<span class="sourceLineNo">280</span>    @Override<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    public short getRowLength() {<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      return this.rowLength;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span><a name="line.284"></a>
-<span class="sourceLineNo">285</span>    @Override<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    public byte[] getFamilyArray() {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>      return this.fam;<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>    @Override<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    public int getFamilyOffset() {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      return 0;<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
-<span class="sourceLineNo">295</span>    @Override<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    public byte getFamilyLength() {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      return this.famLength;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    }<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    @Override<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    public byte[] getQualifierArray() {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      return this.qual;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    }<a name="line.303"></a>
-<span class="sourceLineNo">304</span><a name="line.304"></a>
-<span class="sourceLineNo">305</span>    @Override<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    public int getQualifierOffset() {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      return 0;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    }<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>    @Override<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    public int getQualifierLength() {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      return this.qualLength;<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>    @Override<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    public long getTimestamp() {<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      return ts;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    }<a name="line.318"></a>
-<span class="sourceLineNo">319</span><a name="line.319"></a>
-<span class="sourceLineNo">320</span>    @Override<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    public byte getTypeByte() {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      return type;<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    }<a name="line.323"></a>
-<span class="sourceLineNo">324</span><a name="line.324"></a>
-<span class="sourceLineNo">325</span>    @Override<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    public long getSequenceId() {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      return seqId;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    }<a name="line.328"></a>
-<span class="sourceLineNo">329</span><a name="line.329"></a>
-<span class="sourceLineNo">330</span>    @Override<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    public byte[] getValueArray() {<a name="line.331"></a>
-<span class="sourceLineNo">332</span>      return val;<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    }<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>    @Override<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    public int getValueOffset() {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>      return this.valOffset;<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    }<a name="line.338"></a>
-<span class="sourceLineNo">339</span><a name="line.339"></a>
-<span class="sourceLineNo">340</span>    @Override<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    public int getValueLength() {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>      return this.valLength;<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    }<a name="line.343"></a>
-<span class="sourceLineNo">344</span><a name="line.344"></a>
-<span class="sourceLineNo">345</span>    @Override<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    public byte[] getTagsArray() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      return this.tag;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    }<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>    @Override<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    public int getTagsOffset() {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return 0;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span><a name="line.354"></a>
-<span class="sourceLineNo">355</span>    @Override<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    public int getTagsLength() {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      return this.tagsLength;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>    @Override<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    public String toString() {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),<a name="line.364"></a>
-<span class="sourceLineNo">365</span>          getQualifierLength());<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      String timestamp = String.valueOf((getTimestamp()));<a name="line.366"></a>
-<span class="sourceLineNo">367</span>      return row + "/" + family + (family != null &amp;&amp; family.length() &gt; 0 ? ":" : "") + qualifier<a name="line.367"></a>
-<span class="sourceLineNo">368</span>          + "/" + timestamp + "/" + Type.codeToType(type);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>    @Override<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    public long heapSize() {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    }<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  private static class OffheapPrefixTreeCell extends ByteBufferedCell implements Cell,<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      SettableSequenceId, HeapSize {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.379"></a>
-<span class="sourceLineNo">380</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.380"></a>
-<span class="sourceLineNo">381</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.BYTE_BUFFER));<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    private ByteBuffer rowBuff;<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    private short rowLength;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>    private ByteBuffer famBuff;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    private byte famLength;<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    private ByteBuffer qualBuff;<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    private int qualLength;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    private ByteBuffer val;<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    private int valOffset;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private int valLength;<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    private ByteBuffer tagBuff;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    private int tagsLength;<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    private long ts;<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private long seqId;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    private byte type;<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    public OffheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, ByteBuffer val,<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.398"></a>
-<span class="sourceLineNo">399</span>        long seqId) {<a name="line.399"></a>
-<span class="sourceLineNo">400</span>      byte[] tmpRow = new byte[rowLength];<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      System.arraycopy(row, rowOffset, tmpRow, 0, rowLength);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      this.rowBuff = ByteBuffer.wrap(tmpRow);<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      this.rowLength = rowLength;<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      byte[] tmpFam = new byte[famLength];<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      System.arraycopy(fam, famOffset, tmpFam, 0, famLength);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      this.famBuff = ByteBuffer.wrap(tmpFam);<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      this.famLength = famLength;<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      byte[] tmpQual = new byte[qualLength];<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      System.arraycopy(qual, qualOffset, tmpQual, 0, qualLength);<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      this.qualBuff = ByteBuffer.wrap(tmpQual);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      this.qualLength = qualLength;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>      byte[] tmpTag = new byte[tagLength];<a name="line.412"></a>
-<span class="sourceLineNo">413</span>      System.arraycopy(tag, tagOffset, tmpTag, 0, tagLength);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      this.tagBuff = ByteBuffer.wrap(tmpTag);<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      this.tagsLength = tagLength;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>      this.val = val;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      this.valLength = valLength;<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      this.valOffset = valOffset;<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      this.ts = ts;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>      this.seqId = seqId;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      this.type = type;<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    <a name="line.423"></a>
-<span class="sourceLineNo">424</span>    @Override<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    public void setSequenceId(long seqId) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>      this.seqId = seqId;<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    }<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>    @Override<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    public byte[] getRowArray() {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      return this.rowBuff.array();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    public int getRowOffset() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      return getRowPosition();<a name="line.436"></a>
-<span class="sourceLineNo">437</span>    }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>    @Override<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    public short getRowLength() {<a name="line.440"></a>
-<span class="sourceLineNo">441</span>      return this.rowLength;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>    @Override<a name="line.444"></a>
-<span class="sourceLineNo">445</span>    public byte[] getFamilyArray() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      return this.famBuff.array();<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    }<a name="line.447"></a>
-<span class="sourceLineNo">448</span><a name="line.448"></a>
-<span class="sourceLineNo">449</span>    @Override<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    public int getFamilyOffset() {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      return getFamilyPosition();<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    }<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>    @Override<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    public byte getFamilyLength() {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>      return this.famLength;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    @Override<a name="line.459"></a>
-<span class="sourceLineNo">460</span>    public byte[] getQualifierArray() {<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      return this.qualBuff.array();<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    }<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>    @Override<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    public int getQualifierOffset() {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      return getQualifierPosition();<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
-<span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>    @Override<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    public int getQualifierLength() {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      return this.qualLength;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>    @Override<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    public long getTimestamp() {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>      return ts;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    }<a name="line.477"></a>
-<span class="sourceLineNo">478</span><a name="line.478"></a>
-<span class="sourceLineNo">479</span>    @Override<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    public byte getTypeByte() {<a name="line.480"></a>
-<span class="sourceLineNo">481</span>      return type;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    }<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>    @Override<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    public long getSequenceId() {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      return seqId;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>    @Override<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    public byte[] getValueArray() {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      byte[] tmpVal = new byte[valLength];<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      ByteBufferUtils.copyFromBufferToArray(tmpVal, val, valOffset, 0, valLength);<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      return tmpVal;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    }<a name="line.494"></a>
-<span class="sourceLineNo">495</span><a name="line.495"></a>
-<span class="sourceLineNo">496</span>    @Override<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    public int getValueOffset() {<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      return 0;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    }<a name="line.499"></a>
-<span class="sourceLineNo">500</span><a name="line.500"></a>
-<span class="sourceLineNo">501</span>    @Override<a name="line.501"></a>
-<span class="sourceLineNo">502</span>    public int getValueLength() {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      return this.valLength;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    }<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>    @Override<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    public byte[] getTagsArray() {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      return this.tagBuff.array();<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    @Override<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    public int getTagsOffset() {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return getTagsPosition();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    @Override<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    public int getTagsLength() {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      return this.tagsLength;<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>    <a name="line.520"></a>
-<span class="sourceLineNo">521</span>    @Override<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    public ByteBuffer getRowByteBuffer() {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      return this.rowBuff;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    <a name="line.525"></a>
-<span class="sourceLineNo">526</span>    @Override<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    public int getRowPosition() {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return 0;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    <a name="line.530"></a>
-<span class="sourceLineNo">531</span>    @Override<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    public ByteBuffer getFamilyByteBuffer() {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      return this.famBuff;<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    }<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    <a name="line.535"></a>
-<span class="sourceLineNo">536</span>    @Override<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    public int getFamilyPosition() {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      return 0;<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    }<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    <a name="line.540"></a>
-<span class="sourceLineNo">541</span>    @Override<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    public ByteBuffer getQualifierByteBuffer() {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      return this.qualBuff;<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    }<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    @Override<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    public int getQualifierPosition() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      return 0;<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    @Override<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    public ByteBuffer getTagsByteBuffer() {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      return this.tagBuff;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    }<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    @Override<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    public int getTagsPosition() {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      return 0;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    @Override<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    public ByteBuffer getValueByteBuffer() {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      return this.val;<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>    @Override<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    public int getValuePosition() {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      return this.valOffset;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    @Override<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    public long heapSize() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>    @Override<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    public String toString() {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),<a name="line.580"></a>
-<span class="sourceLineNo">581</span>          getQualifierLength());<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      String timestamp = String.valueOf((getTimestamp()));<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      return row + "/" + family + (family != null &amp;&amp; family.length() &gt; 0 ? ":" : "") + qualifier<a name="line.583"></a>
-<span class="sourceLineNo">584</span>          + "/" + timestamp + "/" + Type.codeToType(type);<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    }<a name="line.585"></a>
-<span class="sourceLineNo">586</span>  }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>}<a name="line.587"></a>
+<span class="sourceLineNo">148</span>  private static final boolean USE_POSITION_BEFORE = false;<a name="line.148"></a>
+<span class="sourceLineNo">149</span><a name="line.149"></a>
+<span class="sourceLineNo">150</span>  /*<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   * Support both of these options since the underlying PrefixTree supports<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   * both. Possibly expand the EncodedSeeker to utilize them both.<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   */<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  protected int seekToOrBeforeUsingPositionAtOrBefore(Cell kv, boolean seekBefore) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>    // this does a deep copy of the key byte[] because the CellSearcher<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    // interface wants a Cell<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    CellScannerPosition position = ptSearcher.seekForwardToOrBefore(kv);<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>    if (CellScannerPosition.AT == position) {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      if (seekBefore) {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>        ptSearcher.previous();<a name="line.162"></a>
+<span class="sourceLineNo">163</span>        return 1;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      }<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      return 0;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
+<span class="sourceLineNo">167</span><a name="line.167"></a>
+<span class="sourceLineNo">168</span>    return 1;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  }<a name="line.169"></a>
+<span class="sourceLineNo">170</span><a name="line.170"></a>
+<span class="sourceLineNo">171</span>  protected int seekToOrBeforeUsingPositionAtOrAfter(Cell kv, boolean seekBefore) {<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    // should probably switch this to use the seekForwardToOrBefore method<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    CellScannerPosition position = ptSearcher.seekForwardToOrAfter(kv);<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>    if (CellScannerPosition.AT == position) {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      if (seekBefore) {<a name="line.176"></a>
+<span class="sourceLineNo">177</span>        ptSearcher.previous();<a name="line.177"></a>
+<span class="sourceLineNo">178</span>        return 1;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      }<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      return 0;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
+<span class="sourceLineNo">183</span><a name="line.183"></a>
+<span class="sourceLineNo">184</span>    if (CellScannerPosition.AFTER == position) {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      if (!ptSearcher.isBeforeFirst()) {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>        ptSearcher.previous();<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      }<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      return 1;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    }<a name="line.189"></a>
+<span class="sourceLineNo">190</span><a name="line.190"></a>
+<span class="sourceLineNo">191</span>    if (position == CellScannerPosition.AFTER_LAST) {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      if (seekBefore) {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>        ptSearcher.previous();<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      }<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      return 1;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>    throw new RuntimeException("unexpected CellScannerPosition:" + position);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  }<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>  @Override<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  public int seekToKeyInBlock(Cell key, boolean forceBeforeOnExactMatch) {<a name="line.202"></a>
+<span class="sourceLineNo">203</span>    if (USE_POSITION_BEFORE) {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      return seekToOrBeforeUsingPositionAtOrBefore(key, forceBeforeOnExactMatch);<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    } else {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      return seekToOrBeforeUsingPositionAtOrAfter(key, forceBeforeOnExactMatch);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>  }<a name="line.208"></a>
+<span class="sourceLineNo">209</span><a name="line.209"></a>
+<span class="sourceLineNo">210</span>  @Override<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  public int compareKey(CellComparator comparator, Cell key) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>    return comparator.compare(key,<a name="line.212"></a>
+<span class="sourceLineNo">213</span>        ptSearcher.current());<a name="line.213"></a>
+<span class="sourceLineNo">214</span>  }<a name="line.214"></a>
+<span class="sourceLineNo">215</span><a name="line.215"></a>
+<span class="sourceLineNo">216</span>  /**<a name="line.216"></a>
+<span class="sourceLineNo">217</span>   * Cloned version of the PrefixTreeCell where except the value part, the rest<a name="line.217"></a>
+<span class="sourceLineNo">218</span>   * of the key part is deep copied<a name="line.218"></a>
+<span class="sourceLineNo">219</span>   *<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   */<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static class OnheapPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    private byte[] row;<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    private short rowLength;<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    private byte[] fam;<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    private byte famLength;<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    private byte[] qual;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    private int qualLength;<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    private byte[] val;<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    private int valOffset;<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    private int valLength;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    private byte[] tag;<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    private int tagsLength;<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    private long ts;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    private long seqId;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    private byte type;<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>    public OnheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.240"></a>
+<span class="sourceLineNo">241</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, byte[] val,<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.242"></a>
+<span class="sourceLineNo">243</span>        long seqId) {<a name="line.243"></a>
+<span class="sourceLineNo">244</span>      this.row = new byte[rowLength];<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      System.arraycopy(row, rowOffset, this.row, 0, rowLength);<a name="line.245"></a>
+<span class="sourceLineNo">246</span>      this.rowLength = rowLength;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>      this.fam = new byte[famLength];<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      System.arraycopy(fam, famOffset, this.fam, 0, famLength);<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      this.famLength = famLength;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      this.qual = new byte[qualLength];<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      System.arraycopy(qual, qualOffset, this.qual, 0, qualLength);<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      this.qualLength = qualLength;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      this.tag = new byte[tagLength];<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      System.arraycopy(tag, tagOffset, this.tag, 0, tagLength);<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      this.tagsLength = tagLength;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      this.val = val;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      this.valLength = valLength;<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      this.valOffset = valOffset;<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      this.ts = ts;<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      this.seqId = seqId;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      this.type = type;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>    @Override<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    public void setSequenceId(long seqId) {<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      this.seqId = seqId;<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    }<a name="line.267"></a>
+<span class="sourceLineNo">268</span><a name="line.268"></a>
+<span class="sourceLineNo">269</span>    @Override<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    public byte[] getRowArray() {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>      return this.row;<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    }<a name="line.272"></a>
+<span class="sourceLineNo">273</span><a name="line.273"></a>
+<span class="sourceLineNo">274</span>    @Override<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    public int getRowOffset() {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      return 0;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    }<a name="line.277"></a>
+<span class="sourceLineNo">278</span><a name="line.278"></a>
+<span class="sourceLineNo">279</span>    @Override<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    public short getRowLength() {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      return this.rowLength;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    public byte[] getFamilyArray() {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      return this.fam;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
+<span class="sourceLineNo">288</span><a name="line.288"></a>
+<span class="sourceLineNo">289</span>    @Override<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    public int getFamilyOffset() {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      return 0;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>    @Override<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    public byte getFamilyLength() {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      return this.famLength;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
+<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">299</span>    @Override<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    public byte[] getQualifierArray() {<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      return this.qual;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>    @Override<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    public int getQualifierOffset() {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      return 0;<a name="line.306"></a>
+<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span>    @Override<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    public int getQualifierLength() {<a name="line.310"></a>
+<span class="sourceLineNo">311</span>      return this.qualLength;<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
+<span class="sourceLineNo">313</span><a name="line.313"></a>
+<span class="sourceLineNo">314</span>    @Override<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    public long getTimestamp() {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>      return ts;<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>    @Override<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    public byte getTypeByte() {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      return type;<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>    @Override<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    public long getSequenceId() {<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      return seqId;<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    @Override<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    public byte[] getValueArray() {<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      return val;<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    }<a name="line.332"></a>
+<span class="sourceLineNo">333</span><a name="line.333"></a>
+<span class="sourceLineNo">334</span>    @Override<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    public int getValueOffset() {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>      return this.valOffset;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    }<a name="line.337"></a>
+<span class="sourceLineNo">338</span><a name="line.338"></a>
+<span class="sourceLineNo">339</span>    @Override<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    public int getValueLength() {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      return this.valLength;<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    }<a name="line.342"></a>
+<span class="sourceLineNo">343</span><a name="line.343"></a>
+<span class="sourceLineNo">344</span>    @Override<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    public byte[] getTagsArray() {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>      return this.tag;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    }<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>    @Override<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    public int getTagsOffset() {<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      return 0;<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    }<a name="line.352"></a>
+<span class="sourceLineNo">353</span><a name="line.353"></a>
+<span class="sourceLineNo">354</span>    @Override<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    public int getTagsLength() {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>      return this.tagsLength;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span><a name="line.358"></a>
+<span class="sourceLineNo">359</span>    @Override<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    public String toString() {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());<a name="line.361"></a>
+<span class="sourceLineNo">362</span>      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),<a name="line.363"></a>
+<span class="sourceLineNo">364</span>          getQualifierLength());<a name="line.364"></a>
+<span class="sourceLineNo">365</span>      String timestamp = String.valueOf((getTimestamp()));<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      return row + "/" + family + (family != null &amp;&amp; family.length() &gt; 0 ? ":" : "") + qualifier<a name="line.366"></a>
+<span class="sourceLineNo">367</span>          + "/" + timestamp + "/" + Type.codeToType(type);<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    }<a name="line.368"></a>
+<span class="sourceLineNo">369</span><a name="line.369"></a>
+<span class="sourceLineNo">370</span>    @Override<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    public long heapSize() {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  }<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  private static class OffheapPrefixTreeCell extends ByteBufferedCell implements Cell,<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      SettableSequenceId, HeapSize {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.378"></a>
+<span class="sourceLineNo">379</span>        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.BYTE_BUFFER));<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    private ByteBuffer rowBuff;<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    private short rowLength;<a name="line.382"></a>
+<span class="sourceLineNo">383</span>    private ByteBuffer famBuff;<a name="line.383"></a>
+<span class="sourceLineNo">384</span>    private byte famLength;<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    private ByteBuffer qualBuff;<a name="line.385"></a>
+<span class="sourceLineNo">386</span>    private int qualLength;<a name="line.386"></a>
+<span class="sourceLineNo">387</span>    private ByteBuffer val;<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    private int valOffset;<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    private int valLength;<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    private ByteBuffer tagBuff;<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    private int tagsLength;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>    private long ts;<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    private long seqId;<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    private byte type;<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    public OffheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,<a name="line.395"></a>
+<span class="sourceLineNo">396</span>        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, ByteBuffer val,<a name="line.396"></a>
+<span class="sourceLineNo">397</span>        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        long seqId) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      byte[] tmpRow = new byte[rowLength];<a name="line.399"></a>
+<span class="sourceLineNo">400</span>      System.arraycopy(row, rowOffset, tmpRow, 0, rowLength);<a name="line.400"></a>
+<span class="sourceLineNo">401</span>      this.rowBuff = ByteBuffer.wrap(tmpRow);<a name="line.401"></a>
+<span class="sourceLineNo">402</span>      this.rowLength = rowLength;<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      byte[] tmpFam = new byte[famLength];<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      System.arraycopy(fam, famOffset, tmpFam, 0, famLength);<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      this.famBuff = ByteBuffer.wrap(tmpFam);<a name="line.405"></a>
+<span class="sourceLineNo">406</span>      this.famLength = famLength;<a name="line.406"></a>
+<span class="sourceLineNo">407</span>      byte[] tmpQual = new byte[qualLength];<a name="line.407"></a>
+<span class="sourceLineNo">408</span>      System.arraycopy(qual, qualOffset, tmpQual, 0, qualLength);<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      this.qualBuff = ByteBuffer.wrap(tmpQual);<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      this.qualLength = qualLength;<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      byte[] tmpTag = new byte[tagLength];<a name="line.411"></a>
+<span class="sourceLineNo">412</span>      System.arraycopy(tag, tagOffset, tmpTag, 0, tagLength);<a name="line.412"></a>
+<span class="sourceLineNo">413</span>      this.tagBuff = ByteBuffer.wrap(tmpTag);<a name="line.413"></a>
+<span class="sourceLineNo">414</span>      this.tagsLength = tagLength;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>      this.val = val;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>      this.valLength = valLength;<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      this.valOffset = valOffset;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>      this.ts = ts;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      this.seqId = seqId;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>      this.type = type;<a name="line.420"></a>
+<span class="sourceLineNo">421</span>    }<a name="line.421"></a>
+<span class="sourceLineNo">422</span>    <a name="line.422"></a>
+<span class="sourceLineNo">423</span>    @Override<a name="line.423"></a>
+<span class="sourceLineNo">424</span>    public void setSequenceId(long seqId) {<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      this.seqId = seqId;<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span><a name="line.427"></a>
+<span class="sourceLineNo">428</span>    @Override<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    public byte[] getRowArray() {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      return this.rowBuff.array();<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>    @Override<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    public int getRowOffset() {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      return getRowPosition();<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    }<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>    @Override<a name="line.438"></a>
+<span class="sourceLineNo">439</span>    public short getRowLength() {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>      return this.rowLength;<a name="line.440"></a>
+<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
+<span class="sourceLineNo">442</span><a name="line.442"></a>
+<span class="sourceLineNo">443</span>    @Override<a name="line.443"></a>
+<span class="sourceLineNo">444</span>    public byte[] getFamilyArray() {<a name="line.444"></a>
+<span class="sourceLineNo">445</span>      return this.famBuff.array();<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    }<a name="line.446"></a>
+<span class="sourceLineNo">447</span><a name="line.447"></a>
+<span class="sourceLineNo">448</span>    @Override<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    public int getFamilyOffset() {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      return getFamilyPosition();<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    }<a

<TRUNCATED>
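
As an illustrative aside (not part of the committed diff above): the OnheapPrefixTreeCell constructor in the listing deep-copies every key component (row, family, qualifier, tags) into fresh arrays while keeping only a reference to the caller's value buffer, and the OffheapPrefixTreeCell variant wraps the same copies in ByteBuffers. A minimal stand-alone Java sketch of that copy strategy, using invented class and field names (they are not HBase APIs), could look like this:

    // Hypothetical sketch of the "copy the key, share the value" pattern.
    public class KeyCopiedValueSharedCell {
      private final byte[] row;        // deep copy of the row key bytes
      private final byte[] value;      // shared reference, not copied
      private final int valueOffset;
      private final int valueLength;

      public KeyCopiedValueSharedCell(byte[] row, int rowOffset, int rowLength,
          byte[] value, int valueOffset, int valueLength) {
        this.row = new byte[rowLength];
        // Key bytes are copied so this cell stays valid after the source buffer is reused.
        System.arraycopy(row, rowOffset, this.row, 0, rowLength);
        this.value = value;            // value bytes are only referenced
        this.valueOffset = valueOffset;
        this.valueLength = valueLength;
      }

      public byte[] getValueArray() {
        return value;                  // callers read via valueOffset/valueLength
      }
    }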

[32/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
index 87da4a7..12e2be7 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html
@@ -135,57 +135,57 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 </td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><code>protected <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer</a></code></td>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#blockWriter">blockWriter</a></strong></code>
+<div class="block">block writer</div>
+</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#cacheConf">cacheConf</a></strong></code>
 <div class="block">Cache configuration for caching data on write.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected boolean</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#closeOutputStream">closeOutputStream</a></strong></code>
 <div class="block">True if we opened the <code>outputStream</code> (and so will close it).</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#comparator">comparator</a></strong></code>
 <div class="block">Key comparator.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexWriter</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#dataBlockIndexWriter">dataBlockIndexWriter</a></strong></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected long</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#entryCount">entryCount</a></strong></code>
 <div class="block">Total # of key/value entries, i.e.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.FileInfo</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#fileInfo">fileInfo</a></strong></code>
 <div class="block">A "file info" block: a key-value map of file-wide metadata.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#firstCellInBlock">firstCellInBlock</a></strong></code>
 <div class="block">First cell in a block.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#firstDataBlockOffset">firstDataBlockOffset</a></strong></code>
 <div class="block">The offset of the first data block or -1 if the file is empty.</div>
 </td>
 </tr>
-<tr class="altColor">
-<td class="colFirst"><code>protected <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer</a></code></td>
-<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#fsBlockWriter">fsBlockWriter</a></strong></code>
-<div class="block">block writer</div>
-</td>
-</tr>
 <tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a></code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#hFileContext">hFileContext</a></strong></code>&nbsp;</td>
@@ -290,6 +290,10 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <div class="block">Used for calculating the average value length.</div>
 </td>
 </tr>
+<tr class="altColor">
+<td class="colFirst"><code>private static long</code></td>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#UNSET">UNSET</a></strong></code>&nbsp;</td>
+</tr>
 </table>
 <ul class="blockList">
 <li class="blockList"><a name="fields_inherited_from_class_org.apache.hadoop.hbase.io.hfile.HFile.Writer">
@@ -425,7 +429,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <tr class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#finishBlock()">finishBlock</a></strong>()</code>
-<div class="block">Clean up the current data block</div>
+<div class="block">Clean up the data block that is currently being written.</div>
 </td>
 </tr>
 <tr class="rowColor">
@@ -533,13 +537,23 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.58">LOG</a></pre>
 </li>
 </ul>
+<a name="UNSET">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>UNSET</h4>
+<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.60">UNSET</a></pre>
+<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileWriterImpl.UNSET">Constant Field Values</a></dd></dl>
+</li>
+</ul>
 <a name="lastCell">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>lastCell</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.61">lastCell</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.63">lastCell</a></pre>
 <div class="block">The Cell previously appended. Becomes the last cell in the file.</div>
 </li>
 </ul>
@@ -549,7 +563,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>outputStream</h4>
-<pre>protected&nbsp;org.apache.hadoop.fs.FSDataOutputStream <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.64">outputStream</a></pre>
+<pre>protected&nbsp;org.apache.hadoop.fs.FSDataOutputStream <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.66">outputStream</a></pre>
 <div class="block">FileSystem stream to write into.</div>
 </li>
 </ul>
@@ -559,7 +573,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>closeOutputStream</h4>
-<pre>protected final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.67">closeOutputStream</a></pre>
+<pre>protected final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.69">closeOutputStream</a></pre>
 <div class="block">True if we opened the <code>outputStream</code> (and so will close it).</div>
 </li>
 </ul>
@@ -569,7 +583,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>fileInfo</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.FileInfo</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.70">fileInfo</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.FileInfo</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.72">fileInfo</a></pre>
 <div class="block">A "file info" block: a key-value map of file-wide metadata.</div>
 </li>
 </ul>
@@ -579,7 +593,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>entryCount</h4>
-<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.73">entryCount</a></pre>
+<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.75">entryCount</a></pre>
 <div class="block">Total # of key/value entries, i.e. how many times add() was called.</div>
 </li>
 </ul>
@@ -589,7 +603,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>totalKeyLength</h4>
-<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.76">totalKeyLength</a></pre>
+<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.78">totalKeyLength</a></pre>
 <div class="block">Used for calculating the average key length.</div>
 </li>
 </ul>
@@ -599,7 +613,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>totalValueLength</h4>
-<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.79">totalValueLength</a></pre>
+<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.81">totalValueLength</a></pre>
 <div class="block">Used for calculating the average value length.</div>
 </li>
 </ul>
@@ -609,7 +623,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>totalUncompressedBytes</h4>
-<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.82">totalUncompressedBytes</a></pre>
+<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.84">totalUncompressedBytes</a></pre>
 <div class="block">Total uncompressed bytes, maybe calculate a compression ratio later.</div>
 </li>
 </ul>
@@ -619,7 +633,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>comparator</h4>
-<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.85">comparator</a></pre>
+<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.87">comparator</a></pre>
 <div class="block">Key comparator. Used to ensure we write in order.</div>
 </li>
 </ul>
@@ -629,7 +643,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>metaNames</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;byte[]&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.88">metaNames</a></pre>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;byte[]&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.90">metaNames</a></pre>
 <div class="block">Meta block names.</div>
 </li>
 </ul>
@@ -639,7 +653,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>metaData</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.io.Writable&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.91">metaData</a></pre>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.io.Writable&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.93">metaData</a></pre>
 <div class="block"><code>Writable</code>s representing meta block data.</div>
 </li>
 </ul>
@@ -649,7 +663,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>firstCellInBlock</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.97">firstCellInBlock</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.99">firstCellInBlock</a></pre>
 <div class="block">First cell in a block.
  This reference should be short-lived since we write hfiles in a burst.</div>
 </li>
@@ -660,7 +674,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>path</h4>
-<pre>protected final&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.101">path</a></pre>
+<pre>protected final&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.103">path</a></pre>
 <div class="block">May be null if we were passed a stream.</div>
 </li>
 </ul>
@@ -670,7 +684,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheConf</h4>
-<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.104">cacheConf</a></pre>
+<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.106">cacheConf</a></pre>
 <div class="block">Cache configuration for caching data on write.</div>
 </li>
 </ul>
@@ -680,7 +694,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>name</h4>
-<pre>protected final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.110">name</a></pre>
+<pre>protected final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.112">name</a></pre>
 <div class="block">Name for this object used when logging or in toString. Is either
  the result of a toString on stream or else name of passed file Path.</div>
 </li>
@@ -691,7 +705,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>blockEncoder</h4>
-<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.116">blockEncoder</a></pre>
+<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.118">blockEncoder</a></pre>
 <div class="block">The data block encoding which will be used.
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.html#INSTANCE"><code>NoOpDataBlockEncoder.INSTANCE</code></a> if there is no encoding.</div>
 </li>
@@ -702,7 +716,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>hFileContext</h4>
-<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.118">hFileContext</a></pre>
+<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.120">hFileContext</a></pre>
 </li>
 </ul>
 <a name="maxTagsLength">
@@ -711,7 +725,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>maxTagsLength</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.120">maxTagsLength</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.122">maxTagsLength</a></pre>
 </li>
 </ul>
 <a name="KEY_VALUE_VERSION">
@@ -720,7 +734,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>KEY_VALUE_VERSION</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.123">KEY_VALUE_VERSION</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.125">KEY_VALUE_VERSION</a></pre>
 <div class="block">KeyValue version in FileInfo</div>
 </li>
 </ul>
@@ -730,7 +744,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>KEY_VALUE_VER_WITH_MEMSTORE</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.126">KEY_VALUE_VER_WITH_MEMSTORE</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.128">KEY_VALUE_VER_WITH_MEMSTORE</a></pre>
 <div class="block">Version for KeyValue which includes memstore timestamp</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileWriterImpl.KEY_VALUE_VER_WITH_MEMSTORE">Constant Field Values</a></dd></dl>
 </li>
@@ -741,17 +755,17 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>inlineBlockWriters</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.html" title="interface in org.apache.hadoop.hbase.io.hfile">InlineBlockWriter</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.129">inlineBlockWriters</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.html" title="interface in org.apache.hadoop.hbase.io.hfile">InlineBlockWriter</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.131">inlineBlockWriters</a></pre>
 <div class="block">Inline block writers for multi-level block index and compound Blooms.</div>
 </li>
 </ul>
-<a name="fsBlockWriter">
+<a name="blockWriter">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>fsBlockWriter</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.132">fsBlockWriter</a></pre>
+<h4>blockWriter</h4>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.134">blockWriter</a></pre>
 <div class="block">block writer</div>
 </li>
 </ul>
@@ -761,7 +775,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>dataBlockIndexWriter</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexWriter</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.134">dataBlockIndexWriter</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexWriter</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.136">dataBlockIndexWriter</a></pre>
 </li>
 </ul>
 <a name="metaBlockIndexWriter">
@@ -770,7 +784,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>metaBlockIndexWriter</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexWriter</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.135">metaBlockIndexWriter</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexWriter</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.137">metaBlockIndexWriter</a></pre>
 </li>
 </ul>
 <a name="firstDataBlockOffset">
@@ -779,7 +793,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>firstDataBlockOffset</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.138">firstDataBlockOffset</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.140">firstDataBlockOffset</a></pre>
 <div class="block">The offset of the first data block or -1 if the file is empty.</div>
 </li>
 </ul>
@@ -789,7 +803,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>lastDataBlockOffset</h4>
-<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.141">lastDataBlockOffset</a></pre>
+<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.143">lastDataBlockOffset</a></pre>
 <div class="block">The offset of the last data block or 0 if the file is empty.</div>
 </li>
 </ul>
@@ -799,7 +813,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>lastCellOfPreviousBlock</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.147">lastCellOfPreviousBlock</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.149">lastCellOfPreviousBlock</a></pre>
 <div class="block">The last(stop) Cell of the previous data block.
  This reference should be short-lived since we write hfiles in a burst.</div>
 </li>
@@ -810,7 +824,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>additionalLoadOnOpenData</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockWritable</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.150">additionalLoadOnOpenData</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockWritable</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.152">additionalLoadOnOpenData</a></pre>
 <div class="block">Additional data items to be written to the "load-on-open" section.</div>
 </li>
 </ul>
@@ -820,7 +834,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockListLast">
 <li class="blockList">
 <h4>maxMemstoreTS</h4>
-<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.152">maxMemstoreTS</a></pre>
+<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.154">maxMemstoreTS</a></pre>
 </li>
 </ul>
 </li>
@@ -837,7 +851,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFileWriterImpl</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.154">HFileWriterImpl</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.156">HFileWriterImpl</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf,
                org.apache.hadoop.fs.Path&nbsp;path,
                org.apache.hadoop.fs.FSDataOutputStream&nbsp;outputStream,
@@ -859,7 +873,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>appendFileInfo</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.190">appendFileInfo</a>(byte[]&nbsp;k,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.191">appendFileInfo</a>(byte[]&nbsp;k,
                   byte[]&nbsp;v)
                     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Add to the file info. All added key/value pairs can be obtained using
@@ -878,7 +892,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>writeFileInfo</h4>
-<pre>protected final&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.205">writeFileInfo</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html" title="class in org.apache.hadoop.hbase.io.hfile">FixedFileTrailer</a>&nbsp;trailer,
+<pre>protected final&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.206">writeFileInfo</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html" title="class in org.apache.hadoop.hbase.io.hfile">FixedFileTrailer</a>&nbsp;trailer,
                  <a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
                             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Sets the file info offset in the trailer, finishes up populating fields in
@@ -896,7 +910,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>checkKey</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.219">checkKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.220">checkKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
                     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Checks that the given Cell's key does not violate the key order.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>cell</code> - Cell whose key to check.</dd>
@@ -911,7 +925,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>checkValue</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.239">checkValue</a>(byte[]&nbsp;value,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.240">checkValue</a>(byte[]&nbsp;value,
               int&nbsp;offset,
               int&nbsp;length)
                    throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -926,7 +940,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>getPath</h4>
-<pre>public&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.250">getPath</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.251">getPath</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#getPath()">getPath</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Writer.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Writer</a></code></dd>
@@ -939,7 +953,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.255">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.256">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -952,7 +966,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>compressionByName</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.260">compressionByName</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;algoName)</pre>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.261">compressionByName</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;algoName)</pre>
 </li>
 </ul>
 <a name="createOutputStream(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, java.net.InetSocketAddress[])">
@@ -961,7 +975,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>createOutputStream</h4>
-<pre>protected static&nbsp;org.apache.hadoop.fs.FSDataOutputStream&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.267">createOutputStream</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>protected static&nbsp;org.apache.hadoop.fs.FSDataOutputStream&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.268">createOutputStream</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                          org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                          org.apache.hadoop.fs.Path&nbsp;path,
                                                          <a href="http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true" title="class or interface in java.net">InetSocketAddress</a>[]&nbsp;favoredNodes)
@@ -977,7 +991,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>finishInit</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.275">finishInit</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.276">finishInit</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Additional initialization steps</div>
 </li>
 </ul>
@@ -987,7 +1001,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>checkBlockBoundary</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.301">checkBlockBoundary</a>()
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.302">checkBlockBoundary</a>()
                            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">At a block boundary, writes all the inline blocks and opens a new block.</div>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -1000,9 +1014,9 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>finishBlock</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.309">finishBlock</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.310">finishBlock</a>()
                   throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">Clean up the current data block</div>
+<div class="block">Clean up the data block that is currently being written.</div>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
 </li>
@@ -1013,7 +1027,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>getMidpoint</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.342">getMidpoint</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.343">getMidpoint</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
                <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;left,
                <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;right)</pre>
 <div class="block">Try to return a Cell that falls between <code>left</code> and
@@ -1030,7 +1044,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>getMinimumMidpointArray</h4>
-<pre>private static&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.430">getMinimumMidpointArray</a>(byte[]&nbsp;leftArray,
+<pre>private static&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.431">getMinimumMidpointArray</a>(byte[]&nbsp;leftArray,
                              int&nbsp;leftOffset,
                              int&nbsp;leftLength,
                              byte[]&nbsp;rightArray,
@@ -1048,7 +1062,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>writeInlineBlocks</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.459">writeInlineBlocks</a>(boolean&nbsp;closing)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.460">writeInlineBlocks</a>(boolean&nbsp;closing)
                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Gives inline block writers an opportunity to contribute blocks.</div>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -1061,7 +1075,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>doCacheOnWrite</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.483">doCacheOnWrite</a>(long&nbsp;offset)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.484">doCacheOnWrite</a>(long&nbsp;offset)</pre>
 <div class="block">Caches the last written HFile block.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>offset</code> - the offset of the block we want to cache. Used to determine
           the cache key.</dd></dl>
@@ -1073,7 +1087,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>newBlock</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.493">newBlock</a>()
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.494">newBlock</a>()
                  throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Ready a new block for writing.</div>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -1086,7 +1100,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>appendMetaBlock</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.514">appendMetaBlock</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;metaBlockName,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.515">appendMetaBlock</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;metaBlockName,
                    org.apache.hadoop.io.Writable&nbsp;content)</pre>
 <div class="block">Add a meta block to the end of the file. Call before close(). Metadata
  blocks are expensive. Fill one with a bunch of serialized data rather than
@@ -1104,7 +1118,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.530">close</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.531">close</a>()
            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
@@ -1121,7 +1135,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>addInlineBlockWriter</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.612">addInlineBlockWriter</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.html" title="interface in org.apache.hadoop.hbase.io.hfile">InlineBlockWriter</a>&nbsp;ibw)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.613">addInlineBlockWriter</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.html" title="interface in org.apache.hadoop.hbase.io.hfile">InlineBlockWriter</a>&nbsp;ibw)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#addInlineBlockWriter(org.apache.hadoop.hbase.io.hfile.InlineBlockWriter)">HFile.Writer</a></code></strong></div>
 <div class="block">Adds an inline block writer such as a multi-level block index writer or
  a compound Bloom filter writer.</div>
@@ -1137,7 +1151,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>addGeneralBloomFilter</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.617">addGeneralBloomFilter</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/BloomFilterWriter.html" title="interface in org.apache.hadoop.hbase.util">BloomFilterWriter</a>&nbsp;bfw)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.618">addGeneralBloomFilter</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/BloomFilterWriter.html" title="interface in org.apache.hadoop.hbase.util">BloomFilterWriter</a>&nbsp;bfw)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#addGeneralBloomFilter(org.apache.hadoop.hbase.util.BloomFilterWriter)">HFile.Writer</a></code></strong></div>
 <div class="block">Store general Bloom filter in the file. This does not deal with Bloom filter
  internals but is necessary, since Bloom filters are stored differently
@@ -1154,7 +1168,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>addDeleteFamilyBloomFilter</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.622">addDeleteFamilyBloomFilter</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/BloomFilterWriter.html" title="interface in org.apache.hadoop.hbase.util">BloomFilterWriter</a>&nbsp;bfw)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.623">addDeleteFamilyBloomFilter</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/BloomFilterWriter.html" title="interface in org.apache.hadoop.hbase.util">BloomFilterWriter</a>&nbsp;bfw)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#addDeleteFamilyBloomFilter(org.apache.hadoop.hbase.util.BloomFilterWriter)">HFile.Writer</a></code></strong></div>
 <div class="block">Store delete family Bloom filter in the file, which is only supported in
  HFile V2.</div>
@@ -1170,7 +1184,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>addBloomFilter</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.626">addBloomFilter</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/BloomFilterWriter.html" title="interface in org.apache.hadoop.hbase.util">BloomFilterWriter</a>&nbsp;bfw,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.627">addBloomFilter</a>(<a href="../../../../../../org/apache/hadoop/hbase/util/BloomFilterWriter.html" title="interface in org.apache.hadoop.hbase.util">BloomFilterWriter</a>&nbsp;bfw,
                   <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
 </li>
 </ul>
@@ -1180,7 +1194,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileContext</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.653">getFileContext</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.654">getFileContext</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Writer.html#getFileContext()">HFile.Writer</a></code></strong></div>
 <div class="block">Return the file context for the HFile this writer belongs to</div>
 <dl>
@@ -1195,7 +1209,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>append</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.666">append</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.667">append</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Add key/value to file. Keys must be added in an order that agrees with the
  Comparator passed on construction.</div>
@@ -1213,7 +1227,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>finishFileInfo</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.699">finishFileInfo</a>()
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.700">finishFileInfo</a>()
                        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
@@ -1225,7 +1239,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>getMajorVersion</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.732">getMajorVersion</a>()</pre>
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.733">getMajorVersion</a>()</pre>
 </li>
 </ul>
 <a name="getMinorVersion()">
@@ -1234,7 +1248,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockList">
 <li class="blockList">
 <h4>getMinorVersion</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.736">getMinorVersion</a>()</pre>
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.737">getMinorVersion</a>()</pre>
 </li>
 </ul>
 <a name="finishClose(org.apache.hadoop.hbase.io.hfile.FixedFileTrailer)">
@@ -1243,7 +1257,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Wri
 <ul class="blockListLast">
 <li class="blockList">
 <h4>finishClose</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.740">finishClose</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html" title="class in org.apache.hadoop.hbase.io.hfile">FixedFileTrailer</a>&nbsp;trailer)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#line.741">finishClose</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html" title="class in org.apache.hadoop.hbase.io.hfile">FixedFileTrailer</a>&nbsp;trailer)
                     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
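
For context on the writer lifecycle documented above (append, appendMetaBlock, close), a minimal usage sketch follows. It assumes a Configuration conf, FileSystem fs and Path path are already in scope and uses the HFile.getWriterFactory builder; it is an illustration of the call order, not the canonical way HBase itself drives HFileWriterImpl.

  // Sketch only: shows the append -> appendMetaBlock -> close ordering
  // described in the method docs above. Names conf, fs and path are assumed.
  HFileContext context = new HFileContextBuilder().withBlockSize(64 * 1024).build();
  HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
      .withPath(fs, path)
      .withFileContext(context)
      .create();
  try {
    // Cells must arrive in the order defined by the comparator (see append()).
    writer.append(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v")));
    // Meta blocks are only written out during close(), so add them first.
    writer.appendMetaBlock("EXAMPLE_META", new org.apache.hadoop.io.Text("example"));
  } finally {
    // close() writes remaining inline blocks, file info and the fixed file trailer.
    writer.close();
  }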

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html
index c573349..aba3996 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html
@@ -99,7 +99,7 @@
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.470">BucketAllocator.IndexStatistics</a>
+<pre>static class <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.479">BucketAllocator.IndexStatistics</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -234,7 +234,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>freeCount</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.471">freeCount</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.480">freeCount</a></pre>
 </li>
 </ul>
 <a name="usedCount">
@@ -243,7 +243,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>usedCount</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.471">usedCount</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.480">usedCount</a></pre>
 </li>
 </ul>
 <a name="itemSize">
@@ -252,7 +252,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>itemSize</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.471">itemSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.480">itemSize</a></pre>
 </li>
 </ul>
 <a name="totalCount">
@@ -261,7 +261,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>totalCount</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.471">totalCount</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.480">totalCount</a></pre>
 </li>
 </ul>
 </li>
@@ -278,7 +278,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>BucketAllocator.IndexStatistics</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.501">BucketAllocator.IndexStatistics</a>(long&nbsp;free,
+<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.510">BucketAllocator.IndexStatistics</a>(long&nbsp;free,
                                long&nbsp;used,
                                long&nbsp;itemSize)</pre>
 </li>
@@ -289,7 +289,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BucketAllocator.IndexStatistics</h4>
-<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.505">BucketAllocator.IndexStatistics</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.514">BucketAllocator.IndexStatistics</a>()</pre>
 </li>
 </ul>
 </li>
@@ -306,7 +306,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>freeCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.473">freeCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.482">freeCount</a>()</pre>
 </li>
 </ul>
 <a name="usedCount()">
@@ -315,7 +315,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>usedCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.477">usedCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.486">usedCount</a>()</pre>
 </li>
 </ul>
 <a name="totalCount()">
@@ -324,7 +324,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>totalCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.481">totalCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.490">totalCount</a>()</pre>
 </li>
 </ul>
 <a name="freeBytes()">
@@ -333,7 +333,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>freeBytes</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.485">freeBytes</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.494">freeBytes</a>()</pre>
 </li>
 </ul>
 <a name="usedBytes()">
@@ -342,7 +342,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>usedBytes</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.489">usedBytes</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.498">usedBytes</a>()</pre>
 </li>
 </ul>
 <a name="totalBytes()">
@@ -351,7 +351,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>totalBytes</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.493">totalBytes</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.502">totalBytes</a>()</pre>
 </li>
 </ul>
 <a name="itemSize()">
@@ -360,7 +360,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>itemSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.497">itemSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.506">itemSize</a>()</pre>
 </li>
 </ul>
 <a name="setTo(long, long, long)">
@@ -369,7 +369,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>setTo</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.509">setTo</a>(long&nbsp;free,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html#line.518">setTo</a>(long&nbsp;free,
          long&nbsp;used,
          long&nbsp;itemSize)</pre>
 </li>
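
The byte-oriented getters above (freeBytes, usedBytes, totalBytes) are straightforward derived values: each multiplies the corresponding count by the item size tracked for that bucket size class. A small stand-alone sketch of the relationship (the class itself is package-private, so the numbers here are hypothetical and only illustrate the arithmetic):

  // Conceptual sketch of what one IndexStatistics entry tracks for a single
  // bucket size class; values are invented for illustration.
  long itemSize  = 65536;                                // size of each allocation in this class
  long freeCount = 10, usedCount = 90;
  long freeBytes  = freeCount * itemSize;                // freeBytes()
  long usedBytes  = usedCount * itemSize;                // usedBytes()
  long totalBytes = (freeCount + usedCount) * itemSize;  // totalBytes()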

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html
index 8c95683..be68c57 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html
@@ -175,7 +175,9 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>static int</code></td>
-<td class="colLast"><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#FEWEST_ITEMS_IN_BUCKET">FEWEST_ITEMS_IN_BUCKET</a></strong></code>&nbsp;</td>
+<td class="colLast"><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#FEWEST_ITEMS_IN_BUCKET">FEWEST_ITEMS_IN_BUCKET</a></strong></code>
+<div class="block">So, what is the minimum amount of items we'll tolerate in a single bucket?</div>
+</td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
@@ -334,7 +336,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_BUCKET_SIZES</h4>
-<pre>private static final&nbsp;int[] <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.275">DEFAULT_BUCKET_SIZES</a></pre>
+<pre>private static final&nbsp;int[] <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.276">DEFAULT_BUCKET_SIZES</a></pre>
 </li>
 </ul>
 <a name="FEWEST_ITEMS_IN_BUCKET">
@@ -343,7 +345,8 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>FEWEST_ITEMS_IN_BUCKET</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.292">FEWEST_ITEMS_IN_BUCKET</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.296">FEWEST_ITEMS_IN_BUCKET</a></pre>
+<div class="block">So, what is the minimum amount of items we'll tolerate in a single bucket?</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator.FEWEST_ITEMS_IN_BUCKET">Constant Field Values</a></dd></dl>
 </li>
 </ul>
@@ -353,7 +356,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>bucketSizes</h4>
-<pre>private final&nbsp;int[] <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.294">bucketSizes</a></pre>
+<pre>private final&nbsp;int[] <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.298">bucketSizes</a></pre>
 </li>
 </ul>
 <a name="bigItemSize">
@@ -362,7 +365,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>bigItemSize</h4>
-<pre>private final&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.295">bigItemSize</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.299">bigItemSize</a></pre>
 </li>
 </ul>
 <a name="bucketCapacity">
@@ -371,7 +374,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>bucketCapacity</h4>
-<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.297">bucketCapacity</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.301">bucketCapacity</a></pre>
 </li>
 </ul>
 <a name="buckets">
@@ -380,7 +383,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>buckets</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.Bucket</a>[] <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.298">buckets</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.Bucket</a>[] <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.302">buckets</a></pre>
 </li>
 </ul>
 <a name="bucketSizeInfos">
@@ -389,7 +392,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>bucketSizeInfos</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.BucketSizeInfo</a>[] <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.299">bucketSizeInfos</a></pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.BucketSizeInfo</a>[] <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.303">bucketSizeInfos</a></pre>
 </li>
 </ul>
 <a name="totalSize">
@@ -398,7 +401,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>totalSize</h4>
-<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.300">totalSize</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.304">totalSize</a></pre>
 </li>
 </ul>
 <a name="usedSize">
@@ -407,7 +410,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>usedSize</h4>
-<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.301">usedSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.305">usedSize</a></pre>
 </li>
 </ul>
 </li>
@@ -424,7 +427,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>BucketAllocator</h4>
-<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.303">BucketAllocator</a>(long&nbsp;availableSpace,
+<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.307">BucketAllocator</a>(long&nbsp;availableSpace,
                int[]&nbsp;bucketSizes)
           throws <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocatorException.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocatorException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -437,7 +440,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BucketAllocator</h4>
-<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.334">BucketAllocator</a>(long&nbsp;availableSpace,
+<pre><a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.343">BucketAllocator</a>(long&nbsp;availableSpace,
                int[]&nbsp;bucketSizes,
                <a href="http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheKey.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheKey</a>,<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache.BucketEntry</a>&gt;&nbsp;map,
                <a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a>&nbsp;realCacheSize)
@@ -463,7 +466,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>roundUpToBucketSizeInfo</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.BucketSizeInfo</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.285">roundUpToBucketSizeInfo</a>(int&nbsp;blockSize)</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.BucketSizeInfo.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.BucketSizeInfo</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.286">roundUpToBucketSizeInfo</a>(int&nbsp;blockSize)</pre>
 <div class="block">Round up the given block size to bucket size, and get the corresponding
  BucketSizeInfo</div>
 </li>
@@ -474,7 +477,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.386">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.395">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -487,7 +490,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getUsedSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.397">getUsedSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.406">getUsedSize</a>()</pre>
 </li>
 </ul>
 <a name="getFreeSize()">
@@ -496,7 +499,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getFreeSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.401">getFreeSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.410">getFreeSize</a>()</pre>
 </li>
 </ul>
 <a name="getTotalSize()">
@@ -505,7 +508,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.405">getTotalSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.414">getTotalSize</a>()</pre>
 </li>
 </ul>
 <a name="allocateBlock(int)">
@@ -514,7 +517,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>allocateBlock</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.416">allocateBlock</a>(int&nbsp;blockSize)
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.425">allocateBlock</a>(int&nbsp;blockSize)
                    throws <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/CacheFullException.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">CacheFullException</a>,
                           <a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocatorException.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocatorException</a></pre>
 <div class="block">Allocate a block with the specified size and return its offset.</div>
@@ -531,7 +534,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>grabGlobalCompletelyFreeBucket</h4>
-<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.Bucket</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.434">grabGlobalCompletelyFreeBucket</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.Bucket</a>&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.443">grabGlobalCompletelyFreeBucket</a>()</pre>
 </li>
 </ul>
 <a name="freeBlock(long)">
@@ -540,7 +543,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>freeBlock</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.447">freeBlock</a>(long&nbsp;offset)</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.456">freeBlock</a>(long&nbsp;offset)</pre>
 <div class="block">Free a block with the offset</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>offset</code> - block's offset</dd>
 <dt><span class="strong">Returns:</span></dt><dd>size freed</dd></dl>
@@ -552,7 +555,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>sizeIndexOfAllocation</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.456">sizeIndexOfAllocation</a>(long&nbsp;offset)</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.465">sizeIndexOfAllocation</a>(long&nbsp;offset)</pre>
 </li>
 </ul>
 <a name="sizeOfAllocation(long)">
@@ -561,7 +564,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>sizeOfAllocation</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.463">sizeOfAllocation</a>(long&nbsp;offset)</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.472">sizeOfAllocation</a>(long&nbsp;offset)</pre>
 </li>
 </ul>
 <a name="getBuckets()">
@@ -570,7 +573,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getBuckets</h4>
-<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.Bucket</a>[]&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.517">getBuckets</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.Bucket.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.Bucket</a>[]&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.526">getBuckets</a>()</pre>
 </li>
 </ul>
 <a name="logStatistics()">
@@ -579,7 +582,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>logStatistics</h4>
-<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.521">logStatistics</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.530">logStatistics</a>()</pre>
 </li>
 </ul>
 <a name="getIndexStatistics(org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator.IndexStatistics)">
@@ -588,7 +591,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getIndexStatistics</h4>
-<pre><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.IndexStatistics</a>[]&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.533">getIndexStatistics</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.IndexStatistics</a>&nbsp;grandTotal)</pre>
+<pre><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.IndexStatistics</a>[]&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.542">getIndexStatistics</a>(<a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.IndexStatistics</a>&nbsp;grandTotal)</pre>
 </li>
 </ul>
 <a name="getIndexStatistics()">
@@ -597,7 +600,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getIndexStatistics</h4>
-<pre><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.IndexStatistics</a>[]&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.544">getIndexStatistics</a>()</pre>
+<pre><a href="../../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.IndexStatistics.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketAllocator.IndexStatistics</a>[]&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.553">getIndexStatistics</a>()</pre>
 </li>
 </ul>
 <a name="freeBlock(long[])">
@@ -606,7 +609,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>freeBlock</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.551">freeBlock</a>(long[]&nbsp;freeList)</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.html#line.560">freeBlock</a>(long[]&nbsp;freeList)</pre>
 </li>
 </ul>
 </li>
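
roundUpToBucketSizeInfo and allocateBlock above both hinge on mapping a requested block size onto one of the allocator's fixed bucket size classes. A conceptual sketch of that rounding step follows; it is not the real method (which returns a BucketSizeInfo and is driven by the configured bucketSizes array), just an illustration of picking the smallest size class that fits:

  // Illustration of the size-class rounding behind roundUpToBucketSizeInfo():
  // given an ascending array of configured bucket sizes, choose the first one
  // large enough for the requested block; a block bigger than every class
  // cannot be allocated (allocateBlock() reports this as an error).
  static int roundUpToSizeClass(int blockSize, int[] bucketSizes) {
    for (int candidate : bucketSizes) {
      if (blockSize <= candidate) {
        return candidate;       // smallest class that can hold the block
      }
    }
    return -1;                  // larger than the biggest configured bucket
  }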


[14/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
index d8b6ca7..66dbcf3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.html
@@ -31,12 +31,12 @@
 <span class="sourceLineNo">023</span>import java.nio.ByteBuffer;<a name="line.23"></a>
 <span class="sourceLineNo">024</span><a name="line.24"></a>
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.ByteBufferedCell;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.Cell;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HConstants;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.31"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.Cell;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HConstants;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.31"></a>
 <span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.KeyValue.Type;<a name="line.32"></a>
 <span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.33"></a>
 <span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.SettableSequenceId;<a name="line.34"></a>
@@ -60,1113 +60,1115 @@
 <span class="sourceLineNo">052</span> */<a name="line.52"></a>
 <span class="sourceLineNo">053</span>@InterfaceAudience.Private<a name="line.53"></a>
 <span class="sourceLineNo">054</span>abstract class BufferedDataBlockEncoder implements DataBlockEncoder {<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  private static int INITIAL_KEY_BUFFER_SIZE = 512;<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  @Override<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public ByteBuffer decodeKeyValues(DataInputStream source,<a name="line.59"></a>
-<span class="sourceLineNo">060</span>      HFileBlockDecodingContext blkDecodingCtx) throws IOException {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>      throw new IOException(this.getClass().getName() + " only accepts "<a name="line.62"></a>
-<span class="sourceLineNo">063</span>          + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    }<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>    HFileBlockDefaultDecodingContext decodingCtx =<a name="line.66"></a>
-<span class="sourceLineNo">067</span>        (HFileBlockDefaultDecodingContext) blkDecodingCtx;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    if (decodingCtx.getHFileContext().isIncludesTags()<a name="line.68"></a>
-<span class="sourceLineNo">069</span>        &amp;&amp; decodingCtx.getHFileContext().isCompressTags()) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      if (decodingCtx.getTagCompressionContext() != null) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        // It will be overhead to create the TagCompressionContext again and again for every block<a name="line.71"></a>
-<span class="sourceLineNo">072</span>        // decoding.<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        decodingCtx.getTagCompressionContext().clear();<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      } else {<a name="line.74"></a>
-<span class="sourceLineNo">075</span>        try {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>          TagCompressionContext tagCompressionContext = new TagCompressionContext(<a name="line.76"></a>
-<span class="sourceLineNo">077</span>              LRUDictionary.class, Byte.MAX_VALUE);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>          decodingCtx.setTagCompressionContext(tagCompressionContext);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>        } catch (Exception e) {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>          throw new IOException("Failed to initialize TagCompressionContext", e);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>        }<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      }<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    }<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    return internalDecodeKeyValues(source, 0, 0, decodingCtx);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  /********************* common prefixes *************************/<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  // Having this as static is fine but if META is having DBE then we should<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  // change this.<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  public static int compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    return Bytes.compareTo(left.getRowArray(), left.getRowOffset() + rowCommonPrefix,<a name="line.91"></a>
-<span class="sourceLineNo">092</span>        left.getRowLength() - rowCommonPrefix, right.getRowArray(), right.getRowOffset()<a name="line.92"></a>
-<span class="sourceLineNo">093</span>            + rowCommonPrefix, right.getRowLength() - rowCommonPrefix);<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  public static int compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset() + familyCommonPrefix,<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        left.getFamilyLength() - familyCommonPrefix, right.getFamilyArray(),<a name="line.98"></a>
-<span class="sourceLineNo">099</span>        right.getFamilyOffset() + familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
-<span class="sourceLineNo">101</span><a name="line.101"></a>
-<span class="sourceLineNo">102</span>  public static int compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset() + qualCommonPrefix,<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        left.getQualifierLength() - qualCommonPrefix, right.getQualifierArray(),<a name="line.104"></a>
-<span class="sourceLineNo">105</span>        right.getQualifierOffset() + qualCommonPrefix, right.getQualifierLength()<a name="line.105"></a>
-<span class="sourceLineNo">106</span>            - qualCommonPrefix);<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  }<a name="line.107"></a>
-<span class="sourceLineNo">108</span><a name="line.108"></a>
-<span class="sourceLineNo">109</span>  protected static class SeekerState {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    protected ByteBuff currentBuffer;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    protected TagCompressionContext tagCompressionContext;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    protected int valueOffset = -1;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    protected int keyLength;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    protected int valueLength;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    protected int lastCommonPrefix;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    protected int tagsLength = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    protected int tagsOffset = -1;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    protected int tagsCompressedLength = 0;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    protected boolean uncompressTags = true;<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>    /** We need to store a copy of the key. */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    protected byte[] keyBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    protected byte[] tagsBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>    protected long memstoreTS;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    protected int nextKvOffset;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    protected KeyValue.KeyOnlyKeyValue currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    // many object creations.<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    private final ObjectIntPair&lt;ByteBuffer&gt; tmpPair;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    private final boolean includeTags;<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public SeekerState(ObjectIntPair&lt;ByteBuffer&gt; tmpPair, boolean includeTags) {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      this.tmpPair = tmpPair;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      this.includeTags = includeTags;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>    protected boolean isValid() {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      return valueOffset != -1;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    protected void invalidate() {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      valueOffset = -1;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>      tagsCompressedLength = 0;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      currentKey = new KeyValue.KeyOnlyKeyValue();<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      uncompressTags = true;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      currentBuffer = null;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    protected void ensureSpaceForKey() {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      if (keyLength &gt; keyBuffer.length) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        // rare case, but we need to handle arbitrary length of key<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        int newKeyBufferLength = Math.max(keyBuffer.length, 1) * 2;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>        while (keyLength &gt; newKeyBufferLength) {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>          newKeyBufferLength *= 2;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        }<a name="line.156"></a>
-<span class="sourceLineNo">157</span>        byte[] newKeyBuffer = new byte[newKeyBufferLength];<a name="line.157"></a>
-<span class="sourceLineNo">158</span>        System.arraycopy(keyBuffer, 0, newKeyBuffer, 0, keyBuffer.length);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        keyBuffer = newKeyBuffer;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      }<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    }<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>    protected void ensureSpaceForTags() {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      if (tagsLength &gt; tagsBuffer.length) {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        // rare case, but we need to handle arbitrary length of tags<a name="line.165"></a>
-<span class="sourceLineNo">166</span>        int newTagsBufferLength = Math.max(tagsBuffer.length, 1) * 2;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        while (tagsLength &gt; newTagsBufferLength) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>          newTagsBufferLength *= 2;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>        }<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        byte[] newTagsBuffer = new byte[newTagsBufferLength];<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        System.arraycopy(tagsBuffer, 0, newTagsBuffer, 0, tagsBuffer.length);<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        tagsBuffer = newTagsBuffer;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    }<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>    protected void setKey(byte[] keyBuffer, long memTS) {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      currentKey.setKey(keyBuffer, 0, keyLength);<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      memstoreTS = memTS;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    }<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>    /**<a name="line.181"></a>
-<span class="sourceLineNo">182</span>     * Copy the state from the next one into this instance (the previous state<a name="line.182"></a>
-<span class="sourceLineNo">183</span>     * placeholder). Used to save the previous state when we are advancing the<a name="line.183"></a>
-<span class="sourceLineNo">184</span>     * seeker to the next key/value.<a name="line.184"></a>
-<span class="sourceLineNo">185</span>     */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    protected void copyFromNext(SeekerState nextState) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      if (keyBuffer.length != nextState.keyBuffer.length) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>        keyBuffer = nextState.keyBuffer.clone();<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      } else if (!isValid()) {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        // Note: we can only call isValid before we override our state, so this<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        // comes before all the assignments at the end of this method.<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        System.arraycopy(nextState.keyBuffer, 0, keyBuffer, 0,<a name="line.192"></a>
-<span class="sourceLineNo">193</span>             nextState.keyLength);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      } else {<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        // don't copy the common prefix between this key and the previous one<a name="line.195"></a>
-<span class="sourceLineNo">196</span>        System.arraycopy(nextState.keyBuffer, nextState.lastCommonPrefix,<a name="line.196"></a>
-<span class="sourceLineNo">197</span>            keyBuffer, nextState.lastCommonPrefix, nextState.keyLength<a name="line.197"></a>
-<span class="sourceLineNo">198</span>                - nextState.lastCommonPrefix);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      }<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      currentKey = nextState.currentKey;<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>      valueOffset = nextState.valueOffset;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      keyLength = nextState.keyLength;<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      valueLength = nextState.valueLength;<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      lastCommonPrefix = nextState.lastCommonPrefix;<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      nextKvOffset = nextState.nextKvOffset;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      memstoreTS = nextState.memstoreTS;<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      currentBuffer = nextState.currentBuffer;<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      tagsOffset = nextState.tagsOffset;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      tagsLength = nextState.tagsLength;<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (nextState.tagCompressionContext != null) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        tagCompressionContext = nextState.tagCompressionContext;<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    public Cell toCell() {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>      // Buffer backing the value and tags part from the HFileBlock's buffer<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      // When tag compression in use, this will be only the value bytes area.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      ByteBuffer valAndTagsBuffer;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      int vOffset;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      int valAndTagsLength = this.valueLength;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>      int tagsLenSerializationSize = 0;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      if (this.includeTags &amp;&amp; this.tagCompressionContext == null) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        // Include the tags part also. This will be the tags bytes + 2 bytes of for storing tags<a name="line.224"></a>
-<span class="sourceLineNo">225</span>        // length<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        tagsLenSerializationSize = this.tagsOffset - (this.valueOffset + this.valueLength);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        valAndTagsLength += tagsLenSerializationSize + this.tagsLength;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      this.currentBuffer.asSubByteBuffer(this.valueOffset, valAndTagsLength, this.tmpPair);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      valAndTagsBuffer = this.tmpPair.getFirst();<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      vOffset = this.tmpPair.getSecond();// This is the offset to value part in the BB<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      if (valAndTagsBuffer.hasArray()) {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        return toOnheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      } else {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        return toOffheapCell(valAndTagsBuffer, vOffset, tagsLenSerializationSize);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>    private Cell toOnheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>        int tagsLenSerializationSize) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      byte[] tagsArray = HConstants.EMPTY_BYTE_ARRAY;<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      int tOffset = 0;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      if (this.includeTags) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        if (this.tagCompressionContext == null) {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          tagsArray = valAndTagsBuffer.array();<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          tOffset = valAndTagsBuffer.arrayOffset() + vOffset + this.valueLength<a name="line.246"></a>
-<span class="sourceLineNo">247</span>              + tagsLenSerializationSize;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        } else {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          tagsArray = Bytes.copy(tagsBuffer, 0, this.tagsLength);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          tOffset = 0;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      return new OnheapDecodedCell(Bytes.copy(keyBuffer, 0, this.keyLength),<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.255"></a>
-<span class="sourceLineNo">256</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer.array(),<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          valAndTagsBuffer.arrayOffset() + vOffset, this.valueLength, memstoreTS, tagsArray,<a name="line.257"></a>
-<span class="sourceLineNo">258</span>          tOffset, this.tagsLength);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>    private Cell toOffheapCell(ByteBuffer valAndTagsBuffer, int vOffset,<a name="line.261"></a>
-<span class="sourceLineNo">262</span>        int tagsLenSerializationSize) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      ByteBuffer tagsBuf =  HConstants.EMPTY_BYTE_BUFFER;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      int tOffset = 0;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      if (this.includeTags) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        if (this.tagCompressionContext == null) {<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          tagsBuf = valAndTagsBuffer;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>          tOffset = vOffset + this.valueLength + tagsLenSerializationSize;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        } else {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          tagsBuf = ByteBuffer.wrap(Bytes.copy(tagsBuffer, 0, this.tagsLength));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          tOffset = 0;<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return new OffheapDecodedCell(ByteBuffer.wrap(Bytes.copy(keyBuffer, 0, this.keyLength)),<a name="line.274"></a>
-<span class="sourceLineNo">275</span>          currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          currentKey.getQualifierOffset(), currentKey.getQualifierLength(),<a name="line.276"></a>
-<span class="sourceLineNo">277</span>          currentKey.getTimestamp(), currentKey.getTypeByte(), valAndTagsBuffer, vOffset,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>          this.valueLength, memstoreTS, tagsBuf, tOffset, this.tagsLength);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    }<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
-<span class="sourceLineNo">283</span>   * Copies only the key part of the keybuffer by doing a deep copy and passes the<a name="line.283"></a>
-<span class="sourceLineNo">284</span>   * seeker state members for taking a clone.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Note that the value byte[] part is still pointing to the currentBuffer and<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * represented by the valueOffset and valueLength<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  // We return this as a Cell to the upper layers of read flow and might try setting a new SeqId<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  // there. So this has to be an instance of SettableSequenceId.<a name="line.289"></a>
-<span class="sourceLineNo">290</span>  protected static class OnheapDecodedCell implements Cell, HeapSize, SettableSequenceId,<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      Streamable {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.293"></a>
-<span class="sourceLineNo">294</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.ARRAY));<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    private byte[] keyOnlyBuffer;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    private short rowLength;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    private int familyOffset;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    private byte familyLength;<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    private int qualifierOffset;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    private int qualifierLength;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    private long timestamp;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    private byte typeByte;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    private byte[] valueBuffer;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    private int valueOffset;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    private int valueLength;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    private byte[] tagsBuffer;<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    private int tagsOffset;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    private int tagsLength;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    private long seqId;<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>    protected OnheapDecodedCell(byte[] keyBuffer, short rowLength, int familyOffset,<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.312"></a>
-<span class="sourceLineNo">313</span>        byte[] valueBuffer, int valueOffset, int valueLen, long seqId, byte[] tagsBuffer,<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        int tagsOffset, int tagsLength) {<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      this.keyOnlyBuffer = keyBuffer;<a name="line.315"></a>
-<span class="sourceLineNo">316</span>      this.rowLength = rowLength;<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      this.familyOffset = familyOffset;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      this.familyLength = familyLength;<a name="line.318"></a>
-<span class="sourceLineNo">319</span>      this.qualifierOffset = qualOffset;<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      this.qualifierLength = qualLength;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>      this.timestamp = timeStamp;<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      this.typeByte = typeByte;<a name="line.322"></a>
-<span class="sourceLineNo">323</span>      this.valueBuffer = valueBuffer;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      this.valueOffset = valueOffset;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      this.valueLength = valueLen;<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      this.tagsBuffer = tagsBuffer;<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      this.tagsOffset = tagsOffset;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      this.tagsLength = tagsLength;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      setSequenceId(seqId);<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>    @Override<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    public byte[] getRowArray() {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>      return keyOnlyBuffer;<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    }<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>    @Override<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    public byte[] getFamilyArray() {<a name="line.338"></a>
-<span class="sourceLineNo">339</span>      return keyOnlyBuffer;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    }<a name="line.340"></a>
-<span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>    @Override<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    public byte[] getQualifierArray() {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>      return keyOnlyBuffer;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
-<span class="sourceLineNo">346</span><a name="line.346"></a>
-<span class="sourceLineNo">347</span>    @Override<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    public int getRowOffset() {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      return Bytes.SIZEOF_SHORT;<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>    @Override<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    public short getRowLength() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      return rowLength;<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>    @Override<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    public int getFamilyOffset() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      return familyOffset;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>    @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    public byte getFamilyLength() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      return familyLength;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>    @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    public int getQualifierOffset() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      return qualifierOffset;<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    }<a name="line.370"></a>
-<span class="sourceLineNo">371</span><a name="line.371"></a>
-<span class="sourceLineNo">372</span>    @Override<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    public int getQualifierLength() {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      return qualifierLength;<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>    @Override<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    public long getTimestamp() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      return timestamp;<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>    @Override<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    public byte getTypeByte() {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      return typeByte;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>    @Override<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    public long getSequenceId() {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      return seqId;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>    @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    public byte[] getValueArray() {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      return this.valueBuffer;<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>    @Override<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    public int getValueOffset() {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      return valueOffset;<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>    @Override<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    public int getValueLength() {<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      return valueLength;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    }<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    @Override<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    public byte[] getTagsArray() {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      return this.tagsBuffer;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    }<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    @Override<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    public int getTagsOffset() {<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      return this.tagsOffset;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    }<a name="line.415"></a>
-<span class="sourceLineNo">416</span><a name="line.416"></a>
-<span class="sourceLineNo">417</span>    @Override<a name="line.417"></a>
-<span class="sourceLineNo">418</span>    public int getTagsLength() {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      return tagsLength;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    @Override<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public String toString() {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          + getValueLength() + "/seqid=" + seqId;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    @Override<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    public void setSequenceId(long seqId) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      this.seqId = seqId;<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    @Override<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    public long heapSize() {<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>    @Override<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    public int write(OutputStream out) throws IOException {<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      return write(out, true);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    @Override<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.445"></a>
-<span class="sourceLineNo">446</span>          tagsLength, withTags);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      ByteBufferUtils.putInt(out, keyOnlyBuffer.length);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      // Write key<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      out.write(keyOnlyBuffer);<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      // Write value<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      out.write(this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      if (withTags) {<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        // 2 bytes tags length followed by tags bytes<a name="line.455"></a>
-<span class="sourceLineNo">456</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        out.write(this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>  }<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>  protected static class OffheapDecodedCell extends ByteBufferedCell implements HeapSize,<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      SettableSequenceId, Streamable {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT<a name="line.468"></a>
-<span class="sourceLineNo">469</span>        + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.BYTE_BUFFER));<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    private ByteBuffer keyBuffer;<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    private short rowLength;<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    private int familyOffset;<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    private byte familyLength;<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    private int qualifierOffset;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    private int qualifierLength;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    private long timestamp;<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    private byte typeByte;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    private ByteBuffer valueBuffer;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    private int valueOffset;<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    private int valueLength;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private ByteBuffer tagsBuffer;<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    private int tagsOffset;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    private int tagsLength;<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    private long seqId;<a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>    protected OffheapDecodedCell(ByteBuffer keyBuffer, short rowLength, int familyOffset,<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        byte familyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte,<a name="line.488"></a>
-<span class="sourceLineNo">489</span>        ByteBuffer valueBuffer, int valueOffset, int valueLen, long seqId, ByteBuffer tagsBuffer,<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        int tagsOffset, int tagsLength) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      // The keyBuffer is always onheap<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      assert keyBuffer.hasArray();<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      assert keyBuffer.arrayOffset() == 0;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      this.keyBuffer = keyBuffer;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      this.rowLength = rowLength;<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      this.familyOffset = familyOffset;<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      this.familyLength = familyLength;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      this.qualifierOffset = qualOffset;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      this.qualifierLength = qualLength;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      this.timestamp = timeStamp;<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      this.typeByte = typeByte;<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      this.valueBuffer = valueBuffer;<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      this.valueOffset = valueOffset;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      this.valueLength = valueLen;<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      this.tagsBuffer = tagsBuffer;<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      this.tagsOffset = tagsOffset;<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      this.tagsLength = tagsLength;<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      setSequenceId(seqId);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>    @Override<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    public byte[] getRowArray() {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      return this.keyBuffer.array();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>    @Override<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    public int getRowOffset() {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      return getRowPosition();<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>    @Override<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    public short getRowLength() {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      return this.rowLength;<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
-<span class="sourceLineNo">525</span><a name="line.525"></a>
-<span class="sourceLineNo">526</span>    @Override<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    public byte[] getFamilyArray() {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return this.keyBuffer.array();<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span><a name="line.530"></a>
-<span class="sourceLineNo">531</span>    @Override<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    public int getFamilyOffset() {<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      return getFamilyPosition();<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    }<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>    @Override<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    public byte getFamilyLength() {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>      return this.familyLength;<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    }<a name="line.539"></a>
-<span class="sourceLineNo">540</span><a name="line.540"></a>
-<span class="sourceLineNo">541</span>    @Override<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    public byte[] getQualifierArray() {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      return this.keyBuffer.array();<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    }<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    @Override<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    public int getQualifierOffset() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      return getQualifierPosition();<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    @Override<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    public int getQualifierLength() {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      return this.qualifierLength;<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    }<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    @Override<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    public long getTimestamp() {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      return this.timestamp;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
-<span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>    @Override<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    public byte getTypeByte() {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      return this.typeByte;<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>    @Override<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    public long getSequenceId() {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      return this.seqId;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>    @Override<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    public byte[] getValueArray() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      return CellUtil.cloneValue(this);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>    @Override<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    public int getValueOffset() {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      return 0;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
-<span class="sourceLineNo">580</span><a name="line.580"></a>
-<span class="sourceLineNo">581</span>    @Override<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    public int getValueLength() {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      return this.valueLength;<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>    @Override<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    public byte[] getTagsArray() {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      return CellUtil.cloneTags(this);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    }<a name="line.589"></a>
-<span class="sourceLineNo">590</span><a name="line.590"></a>
-<span class="sourceLineNo">591</span>    @Override<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    public int getTagsOffset() {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      return 0;<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    }<a name="line.594"></a>
-<span class="sourceLineNo">595</span><a name="line.595"></a>
-<span class="sourceLineNo">596</span>    @Override<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    public int getTagsLength() {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      return this.tagsLength;<a name="line.598"></a>
-<span class="sourceLineNo">599</span>    }<a name="line.599"></a>
-<span class="sourceLineNo">600</span><a name="line.600"></a>
-<span class="sourceLineNo">601</span>    @Override<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    public ByteBuffer getRowByteBuffer() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      return this.keyBuffer;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    }<a name="line.604"></a>
-<span class="sourceLineNo">605</span><a name="line.605"></a>
-<span class="sourceLineNo">606</span>    @Override<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    public int getRowPosition() {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      return Bytes.SIZEOF_SHORT;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>    @Override<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    public ByteBuffer getFamilyByteBuffer() {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      return this.keyBuffer;<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>    @Override<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    public int getFamilyPosition() {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      return this.familyOffset;<a name="line.618"></a>
-<span class="sourceLineNo">619</span>    }<a name="line.619"></a>
-<span class="sourceLineNo">620</span><a name="line.620"></a>
-<span class="sourceLineNo">621</span>    @Override<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    public ByteBuffer getQualifierByteBuffer() {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      return this.keyBuffer;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    }<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>    @Override<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    public int getQualifierPosition() {<a name="line.627"></a>
-<span class="sourceLineNo">628</span>      return this.qualifierOffset;<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    @Override<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    public ByteBuffer getValueByteBuffer() {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      return this.valueBuffer;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
-<span class="sourceLineNo">635</span><a name="line.635"></a>
-<span class="sourceLineNo">636</span>    @Override<a name="line.636"></a>
-<span class="sourceLineNo">637</span>    public int getValuePosition() {<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      return this.valueOffset;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
-<span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>    @Override<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    public ByteBuffer getTagsByteBuffer() {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      return this.tagsBuffer;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>    }<a name="line.644"></a>
-<span class="sourceLineNo">645</span><a name="line.645"></a>
-<span class="sourceLineNo">646</span>    @Override<a name="line.646"></a>
-<span class="sourceLineNo">647</span>    public int getTagsPosition() {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      return this.tagsOffset;<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
-<span class="sourceLineNo">650</span><a name="line.650"></a>
-<span class="sourceLineNo">651</span>    @Override<a name="line.651"></a>
-<span class="sourceLineNo">652</span>    public long heapSize() {<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>    @Override<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    public void setSequenceId(long seqId) {<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      this.seqId = seqId;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span><a name="line.660"></a>
-<span class="sourceLineNo">661</span>    @Override<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    public int write(OutputStream out) throws IOException {<a name="line.662"></a>
-<span class="sourceLineNo">663</span>      return write(out, true);<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    }<a name="line.664"></a>
-<span class="sourceLineNo">665</span><a name="line.665"></a>
-<span class="sourceLineNo">666</span>    @Override<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public int write(OutputStream out, boolean withTags) throws IOException {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          tagsLength, withTags);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      ByteBufferUtils.putInt(out, lenToWrite);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      ByteBufferUtils.putInt(out, keyBuffer.capacity());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      ByteBufferUtils.putInt(out, valueLength);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      // Write key<a name="line.673"></a>
-<span class="sourceLineNo">674</span>      out.write(keyBuffer.array());<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      // Write value<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      ByteBufferUtils.copyBufferToStream(out, this.valueBuffer, this.valueOffset, this.valueLength);<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      if (withTags) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>        // 2 bytes tags length followed by tags bytes<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        // tags length is serialized with 2 bytes only(short way) even if the type is int.<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        // As this is non -ve numbers, we save the sign bit. See HBASE-11437<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        out.write((byte) (0xff &amp; (this.tagsLength &gt;&gt; 8)));<a name="line.681"></a>
-<span class="sourceLineNo">682</span>        out.write((byte) (0xff &amp; this.tagsLength));<a name="line.682"></a>
-<span class="sourceLineNo">683</span>        ByteBufferUtils.copyBufferToStream(out, this.tagsBuffer, this.tagsOffset, this.tagsLength);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      return lenToWrite + Bytes.SIZEOF_INT;<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  protected abstract static class<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      BufferedEncodedSeeker&lt;STATE extends SeekerState&gt;<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      implements EncodedSeeker {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    protected HFileBlockDecodingContext decodingCtx;<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    protected final CellComparator comparator;<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    protected ByteBuff currentBuffer;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    protected TagCompressionContext tagCompressionContext = null;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    protected  KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    // many object creations.<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    protected final ObjectIntPair&lt;ByteBuffer&gt; tmpPair = new ObjectIntPair&lt;ByteBuffer&gt;();<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    protected STATE current, previous;<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>    public BufferedEncodedSeeker(CellComparator comparator,<a name="line.702"></a>
-<span class="sourceLineNo">703</span>        HFileBlockDecodingContext decodingCtx) {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      this.comparator = comparator;<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.decodingCtx = decodingCtx;<a name="line.705"></a>
-<span class="sourceLineNo">706</span>      if (decodingCtx.getHFileContext().isCompressTags()) {<a name="line.706"></a>
-<span class="sourceLineNo">707</span>        try {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>          tagCompressionContext = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>        } catch (Exception e) {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>          throw new RuntimeException("Failed to initialize TagCompressionContext", e);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>        }<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      }<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      current = createSeekerState(); // always valid<a name="line.713"></a>
-<span class="sourceLineNo">714</span>      previous = createSeekerState(); // may not be valid<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    }<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>    protected boolean includesMvcc() {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>      return this.decodingCtx.getHFileContext().isIncludesMvcc();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    }<a name="line.719"></a>
-<span class="sourceLineNo">720</span><a name="line.720"></a>
-<span class="sourceLineNo">721</span>    protected boolean includesTags() {<a name="line.721"></a>
-<span class="sourceLineNo">722</span>      return this.decodingCtx.getHFileContext().isIncludesTags();<a name="line.722"></a>
-<span class="sourceLineNo">723</span>    }<a name="line.723"></a>
-<span class="sourceLineNo">724</span><a name="line.724"></a>
-<span class="sourceLineNo">725</span>    @Override<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    public int compareKey(CellComparator comparator, Cell key) {<a name="line.726"></a>
-<span class="sourceLineNo">727</span>      keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return comparator.compareKeyIgnoresMvcc(key, keyOnlyKV);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    @Override<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    public void setCurrentBuffer(ByteBuff buffer) {<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      if (this.tagCompressionContext != null) {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>        this.tagCompressionContext.clear();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      }<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      currentBuffer = buffer;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>      current.currentBuffer = currentBuffer;<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      if(tagCompressionContext != null) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>        current.tagCompressionContext = tagCompressionContext;<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      }<a name="line.740"></a>
-<span class="sourceLineNo">741</span>      decodeFirst();<a name="line.741"></a>
-<span class="sourceLineNo">742</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      previous.invalidate();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    }<a name="line.744"></a>
-<span class="sourceLineNo">745</span><a name="line.745"></a>
-<span class="sourceLineNo">746</span>    @Override<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    public Cell getKey() {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      byte[] key = new byte[current.keyLength];<a name="line.748"></a>
-<span class="sourceLineNo">749</span>      System.arraycopy(current.keyBuffer, 0, key, 0, current.keyLength);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return new KeyValue.KeyOnlyKeyValue(key);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public ByteBuffer getValueShallowCopy() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      currentBuffer.asSubByteBuffer(current.valueOffset, current.valueLength, tmpPair);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>      ByteBuffer dup = tmpPair.getFirst().duplicate();<a name="line.756"></a>
-<span class="sourceLineNo">757</span>      dup.position(tmpPair.getSecond());<a name="line.757"></a>
-<span class="sourceLineNo">758</span>      dup.limit(tmpPair.getSecond() + current.valueLength);<a name="line.758"></a>
-<span class="sourceLineNo">759</span>      return dup.slice();<a name="line.759"></a>
-<span class="sourceLineNo">760</span>    }<a name="line.760"></a>
-<span class="sourceLineNo">761</span><a name="line.761"></a>
-<span class="sourceLineNo">762</span>    @Override<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    public Cell getCell() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>      return current.toCell();<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    }<a name="line.765"></a>
-<span class="sourceLineNo">766</span><a name="line.766"></a>
-<span class="sourceLineNo">767</span>    @Override<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    public void rewind() {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      currentBuffer.rewind();<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      if (tagCompressionContext != null) {<a name="line.770"></a>
-<span class="sourceLineNo">771</span>        tagCompressionContext.clear();<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      }<a name="line.772"></a>
-<span class="sourceLineNo">773</span>      decodeFirst();<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      previous.invalidate();<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public boolean next() {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      if (!currentBuffer.hasRemaining()) {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        return false;<a name="line.781"></a>
-<span class="sourceLineNo">782</span>      }<a name="line.782"></a>
-<span class="sourceLineNo">783</span>      decodeNext();<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      previous.invalidate();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>      return true;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span><a name="line.788"></a>
-<span class="sourceLineNo">789</span>    protected void decodeTags() {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      current.tagsLength = ByteBuff.readCompressedInt(currentBuffer);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      if (tagCompressionContext != null) {<a name="line.791"></a>
-<span class="sourceLineNo">792</span>        if (current.uncompressTags) {<a name="line.792"></a>
-<span class="sourceLineNo">793</span>          // Tag compression is been used. uncompress it into tagsBuffer<a name="line.793"></a>
-<span class="sourceLineNo">794</span>          current.ensureSpaceForTags();<a name="line.794"></a>
-<span class="sourceLineNo">795</span>          try {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>            current.tagsCompressedLength = tagCompressionContext.uncompressTags(currentBuffer,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>                current.tagsBuffer, 0, current.tagsLength);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>          } catch (IOException e) {<a name="line.798"></a>
-<span class="sourceLineNo">799</span>            throw new RuntimeException("Exception while uncompressing tags", e);<a name="line.799"></a>
-<span class="sourceLineNo">800</span>          }<a name="line.800"></a>
-<span class="sourceLineNo">801</span>        } else {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>          currentBuffer.skip(current.tagsCompressedLength);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>          current.uncompressTags = true;// Reset this.<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        }<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        current.tagsOffset = -1;<a name="line.805"></a>
-<span class="sourceLineNo">806</span>      } else {<a name="line.806"></a>
-<span class="sourceLineNo">807</span>        // When tag compress is not used, let us not do copying of tags bytes into tagsBuffer.<a name="line.807"></a>
-<span class="sourceLineNo">808</span>        // Just mark the tags Offset so as to create the KV buffer later in getKeyValueBuffer()<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        current.tagsOffset = currentBuffer.position();<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        currentBuffer.skip(current.tagsLength);<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      }<a name="line.811"></a>
-<span class="sourceLineNo">812</span>    }<a name="line.812"></a>
-<span class="sourceLineNo">813</span><a name="line.813"></a>
-<span class="sourceLineNo">814</span>    @Override<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    public int seekToKeyInBlock(Cell seekCell, boolean seekBefore) {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      int rowCommonPrefix = 0;<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      int familyCommonPrefix = 0;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      int qualCommonPrefix = 0;<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      previous.invalidate();<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      do {<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        int comp;<a name="line.821"></a>
-<span class="sourceLineNo">822</span>        keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);<a name="line.822"></a>
-<span class="sourceLineNo">823</span>        if (current.lastCommonPrefix != 0) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>          // The KV format has row key length also in the byte array. The<a name="line.824"></a>
-<span class="sourceLineNo">825</span>          // common prefix<a name="line.825"></a>
-<span class="sourceLineNo">826</span>          // includes it. So we need to subtract to find out the common prefix<a name="line.826"></a>
-<span class="sourceLineNo">827</span>          // in the<a name="line.827"></a>
-<span class="sourceLineNo">828</span>          // row part alone<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          rowCommonPrefix = Math.min(rowCommonPrefix, current.lastCommonPrefix - 2);<a name="line.829"></a>
-<span class="sourceLineNo">830</span>        }<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        if (current.lastCommonPrefix &lt;= 2) {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>          rowCommonPrefix = 0;<a name="line.832"></a>
-<span class="sourceLineNo">833</span>        }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>        rowCommonPrefix += findCommonPrefixInRowPart(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>        comp = compareCommonRowPrefix(seekCell, keyOnlyKV, rowCommonPrefix);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>        if (comp == 0) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          comp = compareTypeBytes(seekCell, keyOnlyKV);<a name="line.837"></a>
-<span class="sourceLineNo">838</span>          if (comp == 0) {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>            // Subtract the fixed row key length and the family key fixed length<a name="line.839"></a>
-<span class="sourceLineNo">840</span>            familyCommonPrefix = Math.max(<a name="line.840"></a>
-<span class="sourceLineNo">841</span>                0,<a name="line.841"></a>
-<span class="sourceLineNo">842</span>                Math.min(familyCommonPrefix,<a name="line.842"></a>
-<span class="sourceLineNo">843</span>                    current.lastCommonPrefix - (3 + keyOnlyKV.getRowLength())));<a name="line.843"></a>
-<span class="sourceLineNo">844</span>            familyCommonPrefix += findCommonPrefixInFamilyPart(seekCell, keyOnlyKV,<a name="line.844"></a>
-<span class="sourceLineNo">845</span>                familyCommonPrefix);<a name="line.845"></a>
-<span class="sourceLineNo">846</span>            comp = compareCommonFamilyPrefix(seekCell, keyOnlyKV, familyCommonPrefix);<a name="line.846"></a>
-<span class="sourceLineNo">847</span>            if (comp == 0) {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>              // subtract the rowkey fixed length and the family key fixed<a name="line.848"></a>
-<span class="sourceLineNo">849</span>              // length<a name="line.849"></a>
-<span class="sourceLineNo">850</span>              qualCommonPrefix = Math.max(<a name="line.850"></a>
-<span class="sourceLineNo">851</span>                  0,<a name="line.851"></a>
-<span class="sourceLineNo">852</span>                  Math.min(<a name="line.852"></a>
-<span class="sourceLineNo">853</span>                      qualCommonPrefix,<a name="line.853"></a>
-<span class="sourceLineNo">854</span>                      current.lastCommonPrefix<a name="line.854"></a>
-<span class="sourceLineNo">855</span>                          - (3 + keyOnlyKV.getRowLength() + keyOnlyKV.getFamilyLength())));<a name="line.855"></a>
-<span class="sourceLineNo">856</span>              qualCommonPrefix += findCommonPrefixInQualifierPart(seekCell, keyOnlyKV,<a name="line.856"></a>
-<span class="sourceLineNo">857</span>                  qualCommonPrefix);<a name="line.857"></a>
-<span class="sourceLineNo">858</span>              comp = compareCommonQualifierPrefix(seekCell, keyOnlyKV, qualCommonPrefix);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>              if (comp == 0) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>                comp = CellComparator.compareTimestamps(seekCell, keyOnlyKV);<a name="line.860"></a>
-<span class="sourceLineNo">861</span>                if (comp == 0) {<a name="line.861"></a>
-<span class="sourceLineNo">862</span>                  // Compare types. Let the delete types sort ahead of puts;<a name="line.862"></a>
-<span class="sourceLineNo">863</span>                  // i.e. types<a name="line.863"></a>
-<span class="sourceLineNo">864</span>                  // of higher numbers sort before those of lesser numbers.<a name="line.864"></a>
-<span class="sourceLineNo">865</span>                  // Maximum<a name="line.865"></a>
-<span class="sourceLineNo">866</span>                  // (255)<a name="line.866"></a>
-<span class="sourceLineNo">867</span>                  // appears ahead of everything, and minimum (0) appears<a name="line.867"></a>
-<span class="sourceLineNo">868</span>                  // after<a name="line.868"></a>
-<span class="sourceLineNo">869</span>                  // everything.<a name="line.869"></a>
-<span class="sourceLineNo">870</span>                  comp = (0xff &amp; keyOnlyKV.getTypeByte()) - (0xff &amp; seekCell.getTypeByte());<a name="line.870"></a>
-<span class="sourceLineNo">871</span>                }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>              }<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            }<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          }<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>        if (comp == 0) { // exact match<a name="line.876"></a>
-<span class="sourceLineNo">877</span>          if (seekBefore) {<a name="line.877"></a>
-<span class="sourceLineNo">878</span>            if (!previous.isValid()) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>              // The caller (seekBefore) has to ensure that we are not at the<a name="line.879"></a>
-<span class="sourceLineNo">880</span>              // first key in the block.<a name="line.880"></a>
-<span class="sourceLineNo">881</span>              throw new IllegalStateException("Cannot seekBefore if "<a name="line.881"></a>
-<span class="sourceLineNo">882</span>                  + "positioned at the first key in the block: key="<a name="line.882"></a>
-<span class="sourceLineNo">883</span>                  + Bytes.toStringBinary(seekCell.getRowArray()));<a name="line.883"></a>
-<span class="sourceLineNo">884</span>            }<a name="line.884"></a>
-<span class="sourceLineNo">885</span>            moveToPrevious();<a name="line.885"></a>
-<span class="sourceLineNo">886</span>            return 1;<a name="line.886"></a>
-<span class="sourceLineNo">887</span>          }<a name="line.887"></a>
-<span class="sourceLineNo">888</span>          return 0;<a name="line.888"></a>
-<span class="sourceLineNo">889</span>        }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>        if (comp &lt; 0) { // already too large, check previous<a name="line.891"></a>
-<span class="sourceLineNo">892</span>          if (previous.isValid()) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>            moveToPrevious();<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          } else {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>            return HConstants.INDEX_KEY_MAGIC; // using optimized index key<a name="line.895"></a>
-<span class="sourceLineNo">896</span>          }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>          return 1;<a name="line.897"></a>
-<span class="sourceLineNo">898</span>        }<a name="line.898"></a>
-<span class="sourceLineNo">899</span><a name="line.899"></a>
-<span class="sourceLineNo">900</span>        // move to next, if more data is available<a name="line.900"></a>
-<span class="sourceLineNo">901</span>        if (currentBuffer.hasRemaining()) {<a name="line.901"></a>
-<span class="sourceLineNo">902</span>          previous.copyFromNext(current);<a name="line.902"></a>
-<span class="sourceLineNo">903</span>          decodeNext();<a name="line.903"></a>
-<span class="sourceLineNo">904</span>          current.setKey(current.keyBuffer, current.memstoreTS);<a name="line.904"></a>
-<span class="sourceLineNo">905</span>        } else {<a name="line.905"></a>
-<span class="sourceLineNo">906</span>          break;<a name="line.906"></a>
-<span class="sourceLineNo">907</span>        }<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      } while (true);<a name="line.908"></a>
-<span class="sourceLineNo">909</span><a name="line.909"></a>
-<span class="sourceLineNo">910</span>      // we hit the end of the block, not an exact match<a name="line.910"></a>
-<span class="sourceLineNo">911</span>      return 1;<a name="line.911"></a>
-<span class="sourceLineNo">912</span>    }<a name="line.912"></a>
-<span class="sourceLineNo">913</span><a name="line.913"></a>
-<span class="sourceLineNo">914</span>    private int compareTypeBytes(Cell key, Cell right) {<a name="line.914"></a>
-<span class="sourceLineNo">915</span>      if (key.getFamilyLength() + key.getQualifierLength() == 0<a name="line.915"></a>
-<span class="sourceLineNo">916</span>          &amp;&amp; key.getTypeByte() == Type.Minimum.getCode()) {<a name="line.916"></a>
-<span class="sourceLineNo">917</span>        // left is "bigger", i.e. it appears later in the sorted order<a name="line.917"></a>
-<span class="sourceLineNo">918</span>        return 1;<a name="line.918"></a>
-<span class="sourceLineNo">919</span>      }<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      if (right.getFamilyLength() + right.getQualifierLength() == 0<a name="line.920"></a>
-<span class="sourceLineNo">921</span>          &amp;&amp; right.getTypeByte() == Type.Minimum.getCode()) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>        return -1;<a name="line.922"></a>
-<span class="sourceLineNo">923</span>      }<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      return 0;<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span><a name="line.926"></a>
-<span class="sourceLineNo">927</span>    private static int findCommonPrefixInRowPart(Cell left, Cell right, int rowCommonPrefix) {<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      return Bytes.findCommonPrefix(left.getRowArray(), right.getRowArray(), left.getRowLength()<a name="line.928"></a>
-<span class="sourceLineNo">929</span>          - rowCommonPrefix, right.getRowLength() - rowCommonPrefix, left.getRowOffset()<a name="line.929"></a>
-<span class="sourceLineNo">930</span>          + rowCommonPrefix, right.getRowOffset() + rowCommonPrefix);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    }<a name="line.931"></a>
-<span class="sourceLineNo">932</span><a name="line.932"></a>
-<span class="sourceLineNo">933</span>    private static int findCommonPrefixInFamilyPart(Cell left, Cell right, int familyCommonPrefix) {<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      return Bytes<a name="line.934"></a>
-<span class="sourceLineNo">935</span>          .findCommonPrefix(left.getFamilyArray(), right.getFamilyArray(), left.getFamilyLength()<a name="line.935"></a>
-<span class="sourceLineNo">936</span>              - familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix,<a name="line.936"></a>
-<span class="sourceLineNo">937</span>              left.getFamilyOffset() + familyCommonPrefix, right.getFamilyOffset()<a name="line.937"></a>
-<span class="sourceLineNo">938</span>                  + familyCommonPrefix);<a name="line.938"></a>
-<span class="sourceLineNo">939</span>    }<a name="line.939"></a>
-<span class="sourceLineNo">940</span><a name="line.940"></a>
-<span class="sourceLineNo">941</span>    private static int findCommonPrefixInQualifierPart(Cell left, Cell right,<a name="line.941"></a>
-<span class="sourceLineNo">942</span>        int qualifierCommonPrefix) {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      return Bytes.findCommonPrefix(left.getQualifierArray(), right.getQualifierArray(),<a name="line.943"></a>
-<span class="sourceLineNo">944</span>          left.getQualifierLength() - qualifierCommonPrefix, right.getQualifierLength()<a name="line.944"></a>
-<span class="sourceLineNo">945</span>              - qualifierCommonPrefix, left.getQualifierOffset() + qualifierCommonPrefix,<a name="line.945"></a>
-<span class="sourceLineNo">946</span>          right.getQualifierOffset() + qualifierCommonPrefix);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    private void moveToPrevious() {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!previous.isValid()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        throw new IllegalStateException(<a name="line.951"></a>
-<span class="sourceLineNo">952</span>            "Can move back only once and not in first key in the block.");<a name="line.952"></a>
-<span class="sourceLineNo">953</span>      }<a name="line.953"></a>
-<span class="sourceLineNo">954</span><a name="line.954"></a>
-<span class="sourceLineNo">955</span>      STATE tmp = previous;<a name="line.955"></a>
-<span class="sourceLineNo">956</span>      previous = current;<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      current = tmp;<a name="line.957"></a>
-<span class="sourceLineNo">958</span><a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // move after last key value<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      currentBuffer.position(current.nextKvOffset);<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      // Already decoded the tag bytes. We cache this tags into current st

<TRUNCATED>

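The SeekerState write path removed in the hunk above (source lines 679-686) serializes the tags length as two bytes even though the field is an int: because the value is never negative, it always fits in an unsigned 16-bit quantity, which is the point of the HBASE-11437 note in the comment. Below is a minimal standalone Java sketch of that round trip, assuming only a non-negative length no larger than 0xFFFF; the class and method names are illustrative and are not HBase APIs.

import java.io.ByteArrayOutputStream;

/**
 * Illustration of the two-byte tags-length encoding referenced above:
 * a non-negative int that fits in an unsigned short is written high byte
 * first, then low byte. Names here are illustrative, not HBase code.
 */
public class TagsLengthEncodingSketch {

  /** Write a non-negative length (0..65535) as two bytes, big-endian. */
  static void writeTagsLength(ByteArrayOutputStream out, int tagsLength) {
    if (tagsLength < 0 || tagsLength > 0xFFFF) {
      throw new IllegalArgumentException("tags length out of range: " + tagsLength);
    }
    out.write(0xff & (tagsLength >> 8)); // high byte
    out.write(0xff & tagsLength);        // low byte
  }

  /** Read the length back, masking each byte so the result stays non-negative. */
  static int readTagsLength(byte[] buf, int offset) {
    return ((buf[offset] & 0xff) << 8) | (buf[offset + 1] & 0xff);
  }

  public static void main(String[] args) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    int original = 40000; // would overflow a signed short, but fits in two unsigned bytes
    writeTagsLength(out, original);
    int decoded = readTagsLength(out.toByteArray(), 0);
    System.out.println(original + " -> " + decoded); // prints 40000 -> 40000
  }
}

Because the value is written unsigned, lengths up to 65535 survive the round trip, which is why the comment notes that the sign bit is saved.
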
[12/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
index ec27cfe..6d8219b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
@@ -90,606 +90,612 @@
 <span class="sourceLineNo">082</span>   */<a name="line.82"></a>
 <span class="sourceLineNo">083</span><a name="line.83"></a>
 <span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   * If the chosen ioengine can persist its state across restarts, the path to the file to<a name="line.85"></a>
-<span class="sourceLineNo">086</span>   * persist to.<a name="line.86"></a>
-<span class="sourceLineNo">087</span>   */<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  public static final String BUCKET_CACHE_PERSISTENT_PATH_KEY = <a name="line.88"></a>
-<span class="sourceLineNo">089</span>      "hbase.bucketcache.persistent.path";<a name="line.89"></a>
-<span class="sourceLineNo">090</span><a name="line.90"></a>
-<span class="sourceLineNo">091</span>  /**<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * If the bucket cache is used in league with the lru on-heap block cache (meta blocks such<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * as indices and blooms are kept in the lru blockcache and the data blocks in the<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   * bucket cache).<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  public static final String BUCKET_CACHE_COMBINED_KEY = <a name="line.96"></a>
-<span class="sourceLineNo">097</span>      "hbase.bucketcache.combinedcache.enabled";<a name="line.97"></a>
-<span class="sourceLineNo">098</span><a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final String BUCKET_CACHE_WRITER_THREADS_KEY = "hbase.bucketcache.writer.threads";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  public static final String BUCKET_CACHE_WRITER_QUEUE_KEY = <a name="line.100"></a>
-<span class="sourceLineNo">101</span>      "hbase.bucketcache.writer.queuelength";<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  /**<a name="line.103"></a>
-<span class="sourceLineNo">104</span>   * A comma-delimited array of values for use as bucket sizes.<a name="line.104"></a>
-<span class="sourceLineNo">105</span>   */<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  public static final String BUCKET_CACHE_BUCKETS_KEY = "hbase.bucketcache.bucket.sizes";<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /**<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   * Defaults for Bucket cache<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   */<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  public static final boolean DEFAULT_BUCKET_CACHE_COMBINED = true;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  public static final int DEFAULT_BUCKET_CACHE_WRITER_THREADS = 3;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  public static final int DEFAULT_BUCKET_CACHE_WRITER_QUEUE = 64;<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span> /**<a name="line.115"></a>
-<span class="sourceLineNo">116</span>   * Configuration key to prefetch all blocks of a given file into the block cache<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * when the file is opened.<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   */<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  public static final String PREFETCH_BLOCKS_ON_OPEN_KEY =<a name="line.119"></a>
-<span class="sourceLineNo">120</span>      "hbase.rs.prefetchblocksonopen";<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  /**<a name="line.122"></a>
-<span class="sourceLineNo">123</span>   * The target block size used by blockcache instances. Defaults to<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   * {@link HConstants#DEFAULT_BLOCKSIZE}.<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   * TODO: this config point is completely wrong, as it's used to determine the<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * target block size of BlockCache instances. Rename.<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  public static final String BLOCKCACHE_BLOCKSIZE_KEY = "hbase.offheapcache.minblocksize";<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  private static final String EXTERNAL_BLOCKCACHE_KEY = "hbase.blockcache.use.external";<a name="line.130"></a>
-<span class="sourceLineNo">131</span>  private static final boolean EXTERNAL_BLOCKCACHE_DEFAULT = false;<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>  private static final String EXTERNAL_BLOCKCACHE_CLASS_KEY="hbase.blockcache.external.class";<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  private static final String DROP_BEHIND_CACHE_COMPACTION_KEY="hbase.hfile.drop.behind.compaction";<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  private static final boolean DROP_BEHIND_CACHE_COMPACTION_DEFAULT = true;<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  /**<a name="line.137"></a>
-<span class="sourceLineNo">138</span>   * Enum of all built in external block caches.<a name="line.138"></a>
-<span class="sourceLineNo">139</span>   * This is used for config.<a name="line.139"></a>
-<span class="sourceLineNo">140</span>   */<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  private static enum ExternalBlockCaches {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    memcached("org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    // TODO(eclark): Consider more. Redis, etc.<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    Class&lt;? extends BlockCache&gt; clazz;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    ExternalBlockCaches(String clazzName) {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      try {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>        clazz = (Class&lt;? extends BlockCache&gt;) Class.forName(clazzName);<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      } catch (ClassNotFoundException cnef) {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        clazz = null;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    }<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    ExternalBlockCaches(Class&lt;? extends BlockCache&gt; clazz) {<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      this.clazz = clazz;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>  // Defaults<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  public static final boolean DEFAULT_CACHE_DATA_ON_READ = true;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  public static final boolean DEFAULT_CACHE_DATA_ON_WRITE = false;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static final boolean DEFAULT_IN_MEMORY = false;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>  public static final boolean DEFAULT_CACHE_INDEXES_ON_WRITE = false;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  public static final boolean DEFAULT_CACHE_BLOOMS_ON_WRITE = false;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>  public static final boolean DEFAULT_EVICT_ON_CLOSE = false;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  public static final boolean DEFAULT_CACHE_DATA_COMPRESSED = false;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>  public static final boolean DEFAULT_PREFETCH_ON_OPEN = false;<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  /** Local reference to the block cache, null if completely disabled */<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private final BlockCache blockCache;<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>  /**<a name="line.170"></a>
-<span class="sourceLineNo">171</span>   * Whether blocks should be cached on read (default is on if there is a<a name="line.171"></a>
-<span class="sourceLineNo">172</span>   * cache but this can be turned off on a per-family or per-request basis).<a name="line.172"></a>
-<span class="sourceLineNo">173</span>   * If off we will STILL cache meta blocks; i.e. INDEX and BLOOM types.<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * This cannot be disabled.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   */<a name="line.175"></a>
-<span class="sourceLineNo">176</span>  private boolean cacheDataOnRead;<a name="line.176"></a>
-<span class="sourceLineNo">177</span><a name="line.177"></a>
-<span class="sourceLineNo">178</span>  /** Whether blocks should be flagged as in-memory when being cached */<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  private final boolean inMemory;<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>  /** Whether data blocks should be cached when new files are written */<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  private boolean cacheDataOnWrite;<a name="line.182"></a>
+<span class="sourceLineNo">085</span>   * If the chosen ioengine can persist its state across restarts, the path to the file to persist<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   * to. This file is NOT the data file. It is a file into which we will serialize the map of<a name="line.86"></a>
+<span class="sourceLineNo">087</span>   * what is in the data file. For example, if you pass the following argument as<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   * BUCKET_CACHE_IOENGINE_KEY ("hbase.bucketcache.ioengine"),<a name="line.88"></a>
+<span class="sourceLineNo">089</span>   * &lt;code&gt;file:/tmp/bucketcache.data &lt;/code&gt;, then we will write the bucketcache data to the file<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   * &lt;code&gt;/tmp/bucketcache.data&lt;/code&gt; but the metadata on where the data is in the supplied file<a name="line.90"></a>
+<span class="sourceLineNo">091</span>   * is an in-memory map that needs to be persisted across restarts. Where to store this<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   * in-memory state is what you supply here: e.g. &lt;code&gt;/tmp/bucketcache.map&lt;/code&gt;.<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   */<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  public static final String BUCKET_CACHE_PERSISTENT_PATH_KEY = <a name="line.94"></a>
+<span class="sourceLineNo">095</span>      "hbase.bucketcache.persistent.path";<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>  /**<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * If the bucket cache is used in league with the lru on-heap block cache (meta blocks such<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   * as indices and blooms are kept in the lru blockcache and the data blocks in the<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   * bucket cache).<a name="line.100"></a>
+<span class="sourceLineNo">101</span>   */<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  public static final String BUCKET_CACHE_COMBINED_KEY = <a name="line.102"></a>
+<span class="sourceLineNo">103</span>      "hbase.bucketcache.combinedcache.enabled";<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>  public static final String BUCKET_CACHE_WRITER_THREADS_KEY = "hbase.bucketcache.writer.threads";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  public static final String BUCKET_CACHE_WRITER_QUEUE_KEY = <a name="line.106"></a>
+<span class="sourceLineNo">107</span>      "hbase.bucketcache.writer.queuelength";<a name="line.107"></a>
+<span class="sourceLineNo">108</span><a name="line.108"></a>
+<span class="sourceLineNo">109</span>  /**<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * A comma-delimited array of values for use as bucket sizes.<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   */<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final String BUCKET_CACHE_BUCKETS_KEY = "hbase.bucketcache.bucket.sizes";<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  /**<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   * Defaults for Bucket cache<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   */<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  public static final boolean DEFAULT_BUCKET_CACHE_COMBINED = true;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  public static final int DEFAULT_BUCKET_CACHE_WRITER_THREADS = 3;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>  public static final int DEFAULT_BUCKET_CACHE_WRITER_QUEUE = 64;<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span> /**<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * Configuration key to prefetch all blocks of a given file into the block cache<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * when the file is opened.<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   */<a name="line.124"></a>
+<span class="sourceLineNo">125</span>  public static final String PREFETCH_BLOCKS_ON_OPEN_KEY =<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      "hbase.rs.prefetchblocksonopen";<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  /**<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   * The target block size used by blockcache instances. Defaults to<a name="line.129"></a>
+<span class="sourceLineNo">130</span>   * {@link HConstants#DEFAULT_BLOCKSIZE}.<a name="line.130"></a>
+<span class="sourceLineNo">131</span>   * TODO: this config point is completely wrong, as it's used to determine the<a name="line.131"></a>
+<span class="sourceLineNo">132</span>   * target block size of BlockCache instances. Rename.<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   */<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public static final String BLOCKCACHE_BLOCKSIZE_KEY = "hbase.offheapcache.minblocksize";<a name="line.134"></a>
+<span class="sourceLineNo">135</span><a name="line.135"></a>
+<span class="sourceLineNo">136</span>  private static final String EXTERNAL_BLOCKCACHE_KEY = "hbase.blockcache.use.external";<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  private static final boolean EXTERNAL_BLOCKCACHE_DEFAULT = false;<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  private static final String EXTERNAL_BLOCKCACHE_CLASS_KEY="hbase.blockcache.external.class";<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  private static final String DROP_BEHIND_CACHE_COMPACTION_KEY="hbase.hfile.drop.behind.compaction";<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  private static final boolean DROP_BEHIND_CACHE_COMPACTION_DEFAULT = true;<a name="line.141"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>  /**<a name="line.143"></a>
+<span class="sourceLineNo">144</span>   * Enum of all built in external block caches.<a name="line.144"></a>
+<span class="sourceLineNo">145</span>   * This is used for config.<a name="line.145"></a>
+<span class="sourceLineNo">146</span>   */<a name="line.146"></a>
+<span class="sourceLineNo">147</span>  private static enum ExternalBlockCaches {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    memcached("org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    // TODO(eclark): Consider more. Redis, etc.<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    Class&lt;? extends BlockCache&gt; clazz;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    ExternalBlockCaches(String clazzName) {<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      try {<a name="line.152"></a>
+<span class="sourceLineNo">153</span>        clazz = (Class&lt;? extends BlockCache&gt;) Class.forName(clazzName);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      } catch (ClassNotFoundException cnef) {<a name="line.154"></a>
+<span class="sourceLineNo">155</span>        clazz = null;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      }<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    }<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    ExternalBlockCaches(Class&lt;? extends BlockCache&gt; clazz) {<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      this.clazz = clazz;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>    }<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  }<a name="line.161"></a>
+<span class="sourceLineNo">162</span><a name="line.162"></a>
+<span class="sourceLineNo">163</span>  // Defaults<a name="line.163"></a>
+<span class="sourceLineNo">164</span>  public static final boolean DEFAULT_CACHE_DATA_ON_READ = true;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>  public static final boolean DEFAULT_CACHE_DATA_ON_WRITE = false;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>  public static final boolean DEFAULT_IN_MEMORY = false;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>  public static final boolean DEFAULT_CACHE_INDEXES_ON_WRITE = false;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  public static final boolean DEFAULT_CACHE_BLOOMS_ON_WRITE = false;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  public static final boolean DEFAULT_EVICT_ON_CLOSE = false;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  public static final boolean DEFAULT_CACHE_DATA_COMPRESSED = false;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>  public static final boolean DEFAULT_PREFETCH_ON_OPEN = false;<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>  /** Local reference to the block cache, null if completely disabled */<a name="line.173"></a>
+<span class="sourceLineNo">174</span>  private final BlockCache blockCache;<a name="line.174"></a>
+<span class="sourceLineNo">175</span><a name="line.175"></a>
+<span class="sourceLineNo">176</span>  /**<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * Whether blocks should be cached on read (default is on if there is a<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * cache but this can be turned off on a per-family or per-request basis).<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * If off we will STILL cache meta blocks; i.e. INDEX and BLOOM types.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * This cannot be disabled.<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  private boolean cacheDataOnRead;<a name="line.182"></a>
 <span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  /** Whether index blocks should be cached when new files are written */<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private final boolean cacheIndexesOnWrite;<a name="line.185"></a>
+<span class="sourceLineNo">184</span>  /** Whether blocks should be flagged as in-memory when being cached */<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  private final boolean inMemory;<a name="line.185"></a>
 <span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /** Whether compound bloom filter blocks should be cached on write */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  private final boolean cacheBloomsOnWrite;<a name="line.188"></a>
+<span class="sourceLineNo">187</span>  /** Whether data blocks should be cached when new files are written */<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  private boolean cacheDataOnWrite;<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>  /** Whether blocks of a file should be evicted when the file is closed */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  private boolean evictOnClose;<a name="line.191"></a>
+<span class="sourceLineNo">190</span>  /** Whether index blocks should be cached when new files are written */<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  private final boolean cacheIndexesOnWrite;<a name="line.191"></a>
 <span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>  /** Whether data blocks should be stored in compressed and/or encrypted form in the cache */<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  private final boolean cacheDataCompressed;<a name="line.194"></a>
+<span class="sourceLineNo">193</span>  /** Whether compound bloom filter blocks should be cached on write */<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  private final boolean cacheBloomsOnWrite;<a name="line.194"></a>
 <span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  /** Whether data blocks should be prefetched into the cache */<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private final boolean prefetchOnOpen;<a name="line.197"></a>
+<span class="sourceLineNo">196</span>  /** Whether blocks of a file should be evicted when the file is closed */<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  private boolean evictOnClose;<a name="line.197"></a>
 <span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * If true and if more than one tier in this cache deploy -- e.g. CombinedBlockCache has an L1<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * and an L2 tier -- then cache data blocks up in the L1 tier (The meta blocks are likely being<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * cached up in L1 already.  At least this is the case if CombinedBlockCache).<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   */<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private boolean cacheDataInL1;<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  private final boolean dropBehindCompaction;<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  /**<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   * Create a cache configuration using the specified configuration object and<a name="line.209"></a>
-<span class="sourceLineNo">210</span>   * family descriptor.<a name="line.210"></a>
-<span class="sourceLineNo">211</span>   * @param conf hbase configuration<a name="line.211"></a>
-<span class="sourceLineNo">212</span>   * @param family column family configuration<a name="line.212"></a>
-<span class="sourceLineNo">213</span>   */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  public CacheConfig(Configuration conf, HColumnDescriptor family) {<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    this(CacheConfig.instantiateBlockCache(conf),<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        family.isBlockCacheEnabled(),<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        family.isInMemory(),<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // For the following flags we enable them regardless of per-schema settings<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        // if they are enabled in the global configuration.<a name="line.219"></a>
-<span class="sourceLineNo">220</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY,<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            DEFAULT_CACHE_DATA_ON_WRITE) || family.isCacheDataOnWrite(),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY,<a name="line.222"></a>
-<span class="sourceLineNo">223</span>            DEFAULT_CACHE_INDEXES_ON_WRITE) || family.isCacheIndexesOnWrite(),<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY,<a name="line.224"></a>
-<span class="sourceLineNo">225</span>            DEFAULT_CACHE_BLOOMS_ON_WRITE) || family.isCacheBloomsOnWrite(),<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY,<a name="line.226"></a>
-<span class="sourceLineNo">227</span>            DEFAULT_EVICT_ON_CLOSE) || family.isEvictBlocksOnClose(),<a name="line.227"></a>
-<span class="sourceLineNo">228</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY,<a name="line.229"></a>
-<span class="sourceLineNo">230</span>            DEFAULT_PREFETCH_ON_OPEN) || family.isPrefetchBlocksOnOpen(),<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        conf.getBoolean(HColumnDescriptor.CACHE_DATA_IN_L1,<a name="line.231"></a>
-<span class="sourceLineNo">232</span>            HColumnDescriptor.DEFAULT_CACHE_DATA_IN_L1) || family.isCacheDataInL1(),<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY,DROP_BEHIND_CACHE_COMPACTION_DEFAULT)<a name="line.233"></a>
-<span class="sourceLineNo">234</span>     );<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  }<a name="line.235"></a>
-<span class="sourceLineNo">236</span><a name="line.236"></a>
-<span class="sourceLineNo">237</span>  /**<a name="line.237"></a>
-<span class="sourceLineNo">238</span>   * Create a cache configuration using the specified configuration object and<a name="line.238"></a>
-<span class="sourceLineNo">239</span>   * defaults for family level settings.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>   * @param conf hbase configuration<a name="line.240"></a>
-<span class="sourceLineNo">241</span>   */<a name="line.241"></a>
-<span class="sourceLineNo">242</span>  public CacheConfig(Configuration conf) {<a name="line.242"></a>
-<span class="sourceLineNo">243</span>    this(CacheConfig.instantiateBlockCache(conf),<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        DEFAULT_CACHE_DATA_ON_READ,<a name="line.244"></a>
-<span class="sourceLineNo">245</span>        DEFAULT_IN_MEMORY, // This is a family-level setting so can't be set<a name="line.245"></a>
-<span class="sourceLineNo">246</span>                           // strictly from conf<a name="line.246"></a>
-<span class="sourceLineNo">247</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE),<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE),<a name="line.248"></a>
-<span class="sourceLineNo">249</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE),<a name="line.249"></a>
-<span class="sourceLineNo">250</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.251"></a>
-<span class="sourceLineNo">252</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN),<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        conf.getBoolean(HColumnDescriptor.CACHE_DATA_IN_L1,<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          HColumnDescriptor.DEFAULT_CACHE_DATA_IN_L1),<a name="line.254"></a>
-<span class="sourceLineNo">255</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY,DROP_BEHIND_CACHE_COMPACTION_DEFAULT)<a name="line.255"></a>
-<span class="sourceLineNo">256</span>     );<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  }<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>  /**<a name="line.259"></a>
-<span class="sourceLineNo">260</span>   * Create a block cache configuration with the specified cache and<a name="line.260"></a>
-<span class="sourceLineNo">261</span>   * configuration parameters.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>   * @param blockCache reference to block cache, null if completely disabled<a name="line.262"></a>
-<span class="sourceLineNo">263</span>   * @param cacheDataOnRead whether DATA blocks should be cached on read (we always cache INDEX<a name="line.263"></a>
-<span class="sourceLineNo">264</span>   * blocks and BLOOM blocks; this cannot be disabled).<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * @param inMemory whether blocks should be flagged as in-memory<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * @param cacheDataOnWrite whether data blocks should be cached on write<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * @param cacheIndexesOnWrite whether index blocks should be cached on write<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * @param cacheBloomsOnWrite whether blooms should be cached on write<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * @param evictOnClose whether blocks should be evicted when HFile is closed<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   * @param cacheDataCompressed whether to store blocks as compressed in the cache<a name="line.270"></a>
-<span class="sourceLineNo">271</span>   * @param prefetchOnOpen whether to prefetch blocks upon open<a name="line.271"></a>
-<span class="sourceLineNo">272</span>   * @param cacheDataInL1 If more than one cache tier deployed, if true, cache this column families<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * data blocks up in the L1 tier.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  CacheConfig(final BlockCache blockCache,<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      final boolean cacheDataOnRead, final boolean inMemory,<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      final boolean cacheBloomsOnWrite, final boolean evictOnClose,<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      final boolean cacheDataCompressed, final boolean prefetchOnOpen,<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      final boolean cacheDataInL1, final boolean dropBehindCompaction) {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    this.blockCache = blockCache;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    this.cacheDataOnRead = cacheDataOnRead;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.inMemory = inMemory;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.cacheIndexesOnWrite = cacheIndexesOnWrite;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    this.cacheBloomsOnWrite = cacheBloomsOnWrite;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    this.evictOnClose = evictOnClose;<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    this.cacheDataCompressed = cacheDataCompressed;<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    this.prefetchOnOpen = prefetchOnOpen;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    this.cacheDataInL1 = cacheDataInL1;<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    this.dropBehindCompaction = dropBehindCompaction;<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    LOG.info(this);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
-<span class="sourceLineNo">295</span>  /**<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * Constructs a cache configuration copied from the specified configuration.<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   * @param cacheConf<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   */<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  public CacheConfig(CacheConfig cacheConf) {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    this(cacheConf.blockCache, cacheConf.cacheDataOnRead, cacheConf.inMemory,<a name="line.300"></a>
-<span class="sourceLineNo">301</span>        cacheConf.cacheDataOnWrite, cacheConf.cacheIndexesOnWrite,<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        cacheConf.cacheBloomsOnWrite, cacheConf.evictOnClose,<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        cacheConf.cacheDataCompressed, cacheConf.prefetchOnOpen,<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        cacheConf.cacheDataInL1, cacheConf.dropBehindCompaction);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  }<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /**<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * Checks whether the block cache is enabled.<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   */<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  public boolean isBlockCacheEnabled() {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    return this.blockCache != null;<a name="line.311"></a>
-<span class="sourceLineNo">312</span>  }<a name="line.312"></a>
-<span class="sourceLineNo">313</span><a name="line.313"></a>
-<span class="sourceLineNo">314</span>  /**<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * Returns the block cache.<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * @return the block cache, or null if caching is completely disabled<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  public BlockCache getBlockCache() {<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    return this.blockCache;<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  }<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * Returns whether the DATA blocks of this HFile should be cached on read or not (we always<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * cache the meta blocks, the INDEX and BLOOM blocks).<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * @return true if blocks should be cached on read, false if not<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   */<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  public boolean shouldCacheDataOnRead() {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    return isBlockCacheEnabled() &amp;&amp; cacheDataOnRead;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  }<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  public boolean shouldDropBehindCompaction() {<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    return dropBehindCompaction;<a name="line.332"></a>
-<span class="sourceLineNo">333</span>  }<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>  /**<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   * Should we cache a block of a particular category? We always cache<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * important blocks such as index blocks, as long as the block cache is<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * available.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  public boolean shouldCacheBlockOnRead(BlockCategory category) {<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    return isBlockCacheEnabled()<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        &amp;&amp; (cacheDataOnRead ||<a name="line.342"></a>
-<span class="sourceLineNo">343</span>            category == BlockCategory.INDEX ||<a name="line.343"></a>
-<span class="sourceLineNo">344</span>            category == BlockCategory.BLOOM ||<a name="line.344"></a>
-<span class="sourceLineNo">345</span>            (prefetchOnOpen &amp;&amp;<a name="line.345"></a>
-<span class="sourceLineNo">346</span>                (category != BlockCategory.META &amp;&amp;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>                 category != BlockCategory.UNKNOWN)));<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  }<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * @return true if blocks in this file should be flagged as in-memory<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   */<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  public boolean isInMemory() {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    return isBlockCacheEnabled() &amp;&amp; this.inMemory;<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
-<span class="sourceLineNo">356</span><a name="line.356"></a>
-<span class="sourceLineNo">357</span>  /**<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * @return True if cache data blocks in L1 tier (if more than one tier in block cache deploy).<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   */<a name="line.359"></a>
-<span class="sourceLineNo">360</span>  public boolean isCacheDataInL1() {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataInL1;<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @return true if data blocks should be written to the cache when an HFile is<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   *         written, false if not<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public boolean shouldCacheDataOnWrite() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnWrite;<a name="line.369"></a>
-<span class="sourceLineNo">370</span>  }<a name="line.370"></a>
-<span class="sourceLineNo">371</span><a name="line.371"></a>
-<span class="sourceLineNo">372</span>  /**<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * Only used for testing.<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   * @param cacheDataOnWrite whether data blocks should be written to the cache<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   *                         when an HFile is written<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   */<a name="line.376"></a>
-<span class="sourceLineNo">377</span>  @VisibleForTesting<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public void setCacheDataOnWrite(boolean cacheDataOnWrite) {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Only used for testing.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param cacheDataInL1 Whether to cache data blocks up in l1 (if a multi-tier cache<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * implementation).<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  @VisibleForTesting<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  public void setCacheDataInL1(boolean cacheDataInL1) {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    this.cacheDataInL1 = cacheDataInL1;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>  }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>  /**<a name="line.392"></a>
-<span class="sourceLineNo">393</span>   * @return true if index blocks should be written to the cache when an HFile<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   *         is written, false if not<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   */<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  public boolean shouldCacheIndexesOnWrite() {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheIndexesOnWrite;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>  }<a name="line.398"></a>
-<span class="sourceLineNo">399</span><a name="line.399"></a>
-<span class="sourceLineNo">400</span>  /**<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @return true if bloom blocks should be written to the cache when an HFile<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   *         is written, false if not<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   */<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  public boolean shouldCacheBloomsOnWrite() {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheBloomsOnWrite;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * @return true if blocks should be evicted from the cache when an HFile<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *         reader is closed, false if not<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  public boolean shouldEvictOnClose() {<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    return isBlockCacheEnabled() &amp;&amp; this.evictOnClose;<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  }<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>  /**<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * Only used for testing.<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @param evictOnClose whether blocks should be evicted from the cache when an<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   *                     HFile reader is closed<a name="line.419"></a>
-<span class="sourceLineNo">420</span>   */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  public void setEvictOnClose(boolean evictOnClose) {<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    this.evictOnClose = evictOnClose;<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  }<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>  /**<a name="line.425"></a>
-<span class="sourceLineNo">426</span>   * @return true if data blocks should be compressed in the cache, false if not<a name="line.426"></a>
-<span class="sourceLineNo">427</span>   */<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  public boolean shouldCacheDataCompressed() {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataCompressed;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  }<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  /**<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * @return true if this {@link BlockCategory} should be compressed in blockcache, false otherwise<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  public boolean shouldCacheCompressed(BlockCategory category) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    if (!isBlockCacheEnabled()) return false;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>    switch (category) {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      case DATA:<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        return this.cacheDataCompressed;<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      default:<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        return false;<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    }<a name="line.442"></a>
-<span class="sourceLineNo">443</span>  }<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>  /**<a name="line.445"></a>
-<span class="sourceLineNo">446</span>   * @return true if blocks should be prefetched into the cache on open, false if not<a name="line.446"></a>
-<span class="sourceLineNo">447</span>   */<a name="line.447"></a>
-<span class="sourceLineNo">448</span>  public boolean shouldPrefetchOnOpen() {<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    return isBlockCacheEnabled() &amp;&amp; this.prefetchOnOpen;<a name="line.449"></a>
-<span class="sourceLineNo">450</span>  }<a name="line.450"></a>
-<span class="sourceLineNo">451</span><a name="line.451"></a>
-<span class="sourceLineNo">452</span>  /**<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   * Return true if we may find this type of block in block cache.<a name="line.453"></a>
-<span class="sourceLineNo">454</span>   * &lt;p&gt;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>   * TODO: today {@code family.isBlockCacheEnabled()} only means {@code cacheDataOnRead}, so here we<a name="line.455"></a>
-<span class="sourceLineNo">456</span>   * consider lots of other configurations such as {@code cacheDataOnWrite}. We should fix this in<a name="line.456"></a>
-<span class="sourceLineNo">457</span>   * the future, {@code cacheDataOnWrite} should honor the CF level {@code isBlockCacheEnabled}<a name="line.457"></a>
-<span class="sourceLineNo">458</span>   * configuration.<a name="line.458"></a>
-<span class="sourceLineNo">459</span>   */<a name="line.459"></a>
-<span class="sourceLineNo">460</span>  public boolean shouldReadBlockFromCache(BlockType blockType) {<a name="line.460"></a>
-<span class="sourceLineNo">461</span>    if (!isBlockCacheEnabled()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>      return false;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    if (cacheDataOnRead) {<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      return true;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    }<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    if (prefetchOnOpen) {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>      return true;<a name="line.468"></a>
+<span class="sourceLineNo">199</span>  /** Whether data blocks should be stored in compressed and/or encrypted form in the cache */<a name="line.199"></a>
+<span class="sourceLineNo">200</span>  private final boolean cacheDataCompressed;<a name="line.200"></a>
+<span class="sourceLineNo">201</span><a name="line.201"></a>
+<span class="sourceLineNo">202</span>  /** Whether data blocks should be prefetched into the cache */<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  private final boolean prefetchOnOpen;<a name="line.203"></a>
+<span class="sourceLineNo">204</span><a name="line.204"></a>
+<span class="sourceLineNo">205</span>  /**<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * If true and if more than one tier in this cache deploy -- e.g. CombinedBlockCache has an L1<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * and an L2 tier -- then cache data blocks up in the L1 tier (The meta blocks are likely being<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   * cached up in L1 already.  At least this is the case if CombinedBlockCache).<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
+<span class="sourceLineNo">210</span>  private boolean cacheDataInL1;<a name="line.210"></a>
+<span class="sourceLineNo">211</span><a name="line.211"></a>
+<span class="sourceLineNo">212</span>  private final boolean dropBehindCompaction;<a name="line.212"></a>
+<span class="sourceLineNo">213</span><a name="line.213"></a>
+<span class="sourceLineNo">214</span>  /**<a name="line.214"></a>
+<span class="sourceLineNo">215</span>   * Create a cache configuration using the specified configuration object and<a name="line.215"></a>
+<span class="sourceLineNo">216</span>   * family descriptor.<a name="line.216"></a>
+<span class="sourceLineNo">217</span>   * @param conf hbase configuration<a name="line.217"></a>
+<span class="sourceLineNo">218</span>   * @param family column family configuration<a name="line.218"></a>
+<span class="sourceLineNo">219</span>   */<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  public CacheConfig(Configuration conf, HColumnDescriptor family) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    this(CacheConfig.instantiateBlockCache(conf),<a name="line.221"></a>
+<span class="sourceLineNo">222</span>        family.isBlockCacheEnabled(),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        family.isInMemory(),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        // For the following flags we enable them regardless of per-schema settings<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // if they are enabled in the global configuration.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>            DEFAULT_CACHE_DATA_ON_WRITE) || family.isCacheDataOnWrite(),<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY,<a name="line.228"></a>
+<span class="sourceLineNo">229</span>            DEFAULT_CACHE_INDEXES_ON_WRITE) || family.isCacheIndexesOnWrite(),<a name="line.229"></a>
+<span class="sourceLineNo">230</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY,<a name="line.230"></a>
+<span class="sourceLineNo">231</span>            DEFAULT_CACHE_BLOOMS_ON_WRITE) || family.isCacheBloomsOnWrite(),<a name="line.231"></a>
+<span class="sourceLineNo">232</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY,<a name="line.232"></a>
+<span class="sourceLineNo">233</span>            DEFAULT_EVICT_ON_CLOSE) || family.isEvictBlocksOnClose(),<a name="line.233"></a>
+<span class="sourceLineNo">234</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.234"></a>
+<span class="sourceLineNo">235</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY,<a name="line.235"></a>
+<span class="sourceLineNo">236</span>            DEFAULT_PREFETCH_ON_OPEN) || family.isPrefetchBlocksOnOpen(),<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        conf.getBoolean(HColumnDescriptor.CACHE_DATA_IN_L1,<a name="line.237"></a>
+<span class="sourceLineNo">238</span>            HColumnDescriptor.DEFAULT_CACHE_DATA_IN_L1) || family.isCacheDataInL1(),<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY,DROP_BEHIND_CACHE_COMPACTION_DEFAULT)<a name="line.239"></a>
+<span class="sourceLineNo">240</span>     );<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /**<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * Create a cache configuration using the specified configuration object and<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * defaults for family level settings.<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   * @param conf hbase configuration<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   */<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  public CacheConfig(Configuration conf) {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    this(CacheConfig.instantiateBlockCache(conf),<a name="line.249"></a>
+<span class="sourceLineNo">250</span>        DEFAULT_CACHE_DATA_ON_READ,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>        DEFAULT_IN_MEMORY, // This is a family-level setting so can't be set<a name="line.251"></a>
+<span class="sourceLineNo">252</span>                           // strictly from conf<a name="line.252"></a>
+<span class="sourceLineNo">253</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE),<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE),<a name="line.254"></a>
+<span class="sourceLineNo">255</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE),<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE),<a name="line.256"></a>
+<span class="sourceLineNo">257</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.257"></a>
+<span class="sourceLineNo">258</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        conf.getBoolean(HColumnDescriptor.CACHE_DATA_IN_L1,<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          HColumnDescriptor.DEFAULT_CACHE_DATA_IN_L1),<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY,DROP_BEHIND_CACHE_COMPACTION_DEFAULT)<a name="line.261"></a>
+<span class="sourceLineNo">262</span>     );<a name="line.262"></a>
+<span class="sourceLineNo">263</span>  }<a name="line.263"></a>
+<span class="sourceLineNo">264</span><a name="line.264"></a>
+<span class="sourceLineNo">265</span>  /**<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   * Create a block cache configuration with the specified cache and<a name="line.266"></a>
+<span class="sourceLineNo">267</span>   * configuration parameters.<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * @param blockCache reference to block cache, null if completely disabled<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   * @param cacheDataOnRead whether DATA blocks should be cached on read (we always cache INDEX<a name="line.269"></a>
+<span class="sourceLineNo">270</span>   * blocks and BLOOM blocks; this cannot be disabled).<a name="line.270"></a>
+<span class="sourceLineNo">271</span>   * @param inMemory whether blocks should be flagged as in-memory<a name="line.271"></a>
+<span class="sourceLineNo">272</span>   * @param cacheDataOnWrite whether data blocks should be cached on write<a name="line.272"></a>
+<span class="sourceLineNo">273</span>   * @param cacheIndexesOnWrite whether index blocks should be cached on write<a name="line.273"></a>
+<span class="sourceLineNo">274</span>   * @param cacheBloomsOnWrite whether blooms should be cached on write<a name="line.274"></a>
+<span class="sourceLineNo">275</span>   * @param evictOnClose whether blocks should be evicted when HFile is closed<a name="line.275"></a>
+<span class="sourceLineNo">276</span>   * @param cacheDataCompressed whether to store blocks as compressed in the cache<a name="line.276"></a>
+<span class="sourceLineNo">277</span>   * @param prefetchOnOpen whether to prefetch blocks upon open<a name="line.277"></a>
+<span class="sourceLineNo">278</span>   * @param cacheDataInL1 If more than one cache tier deployed, if true, cache this column families<a name="line.278"></a>
+<span class="sourceLineNo">279</span>   * data blocks up in the L1 tier.<a name="line.279"></a>
+<span class="sourceLineNo">280</span>   */<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  CacheConfig(final BlockCache blockCache,<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      final boolean cacheDataOnRead, final boolean inMemory,<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      final boolean cacheBloomsOnWrite, final boolean evictOnClose,<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      final boolean cacheDataCompressed, final boolean prefetchOnOpen,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      final boolean cacheDataInL1, final boolean dropBehindCompaction) {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    this.blockCache = blockCache;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    this.cacheDataOnRead = cacheDataOnRead;<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    this.inMemory = inMemory;<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    this.cacheIndexesOnWrite = cacheIndexesOnWrite;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.cacheBloomsOnWrite = cacheBloomsOnWrite;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    this.evictOnClose = evictOnClose;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    this.cacheDataCompressed = cacheDataCompressed;<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    this.prefetchOnOpen = prefetchOnOpen;<a name="line.295"></a>
+<span class="sourceLineNo">296</span>    this.cacheDataInL1 = cacheDataInL1;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    this.dropBehindCompaction = dropBehindCompaction;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    LOG.info(this);<a name="line.298"></a>
+<span class="sourceLineNo">299</span>  }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>  /**<a name="line.301"></a>
+<span class="sourceLineNo">302</span>   * Constructs a cache configuration copied from the specified configuration.<a name="line.302"></a>
+<span class="sourceLineNo">303</span>   * @param cacheConf<a name="line.303"></a>
+<span class="sourceLineNo">304</span>   */<a name="line.304"></a>
+<span class="sourceLineNo">305</span>  public CacheConfig(CacheConfig cacheConf) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    this(cacheConf.blockCache, cacheConf.cacheDataOnRead, cacheConf.inMemory,<a name="line.306"></a>
+<span class="sourceLineNo">307</span>        cacheConf.cacheDataOnWrite, cacheConf.cacheIndexesOnWrite,<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        cacheConf.cacheBloomsOnWrite, cacheConf.evictOnClose,<a name="line.308"></a>
+<span class="sourceLineNo">309</span>        cacheConf.cacheDataCompressed, cacheConf.prefetchOnOpen,<a name="line.309"></a>
+<span class="sourceLineNo">310</span>        cacheConf.cacheDataInL1, cacheConf.dropBehindCompaction);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Checks whether the block cache is enabled.<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   */<a name="line.315"></a>
+<span class="sourceLineNo">316</span>  public boolean isBlockCacheEnabled() {<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    return this.blockCache != null;<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  }<a name="line.318"></a>
+<span class="sourceLineNo">319</span><a name="line.319"></a>
+<span class="sourceLineNo">320</span>  /**<a name="line.320"></a>
+<span class="sourceLineNo">321</span>   * Returns the block cache.<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * @return the block cache, or null if caching is completely disabled<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
+<span class="sourceLineNo">324</span>  public BlockCache getBlockCache() {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return this.blockCache;<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Returns whether the DATA blocks of this HFile should be cached on read or not (we always<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * cache the meta blocks, the INDEX and BLOOM blocks).<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * @return true if blocks should be cached on read, false if not<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   */<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public boolean shouldCacheDataOnRead() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    return isBlockCacheEnabled() &amp;&amp; cacheDataOnRead;<a name="line.334"></a>
+<span class="sourceLineNo">335</span>  }<a name="line.335"></a>
+<span class="sourceLineNo">336</span><a name="line.336"></a>
+<span class="sourceLineNo">337</span>  public boolean shouldDropBehindCompaction() {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    return dropBehindCompaction;<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
+<span class="sourceLineNo">340</span><a name="line.340"></a>
+<span class="sourceLineNo">341</span>  /**<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * Should we cache a block of a particular category? We always cache<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * important blocks such as index blocks, as long as the block cache is<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   * available.<a name="line.344"></a>
+<span class="sourceLineNo">345</span>   */<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  public boolean shouldCacheBlockOnRead(BlockCategory category) {<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    return isBlockCacheEnabled()<a name="line.347"></a>
+<span class="sourceLineNo">348</span>        &amp;&amp; (cacheDataOnRead ||<a name="line.348"></a>
+<span class="sourceLineNo">349</span>            category == BlockCategory.INDEX ||<a name="line.349"></a>
+<span class="sourceLineNo">350</span>            category == BlockCategory.BLOOM ||<a name="line.350"></a>
+<span class="sourceLineNo">351</span>            (prefetchOnOpen &amp;&amp;<a name="line.351"></a>
+<span class="sourceLineNo">352</span>                (category != BlockCategory.META &amp;&amp;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>                 category != BlockCategory.UNKNOWN)));<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  }<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>  /**<a name="line.356"></a>
+<span class="sourceLineNo">357</span>   * @return true if blocks in this file should be flagged as in-memory<a name="line.357"></a>
+<span class="sourceLineNo">358</span>   */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>  public boolean isInMemory() {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    return isBlockCacheEnabled() &amp;&amp; this.inMemory;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>  }<a name="line.361"></a>
+<span class="sourceLineNo">362</span><a name="line.362"></a>
+<span class="sourceLineNo">363</span>  /**<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   * @return True if cache data blocks in L1 tier (if more than one tier in block cache deploy).<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   */<a name="line.365"></a>
+<span class="sourceLineNo">366</span>  public boolean isCacheDataInL1() {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataInL1;<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  }<a name="line.368"></a>
+<span class="sourceLineNo">369</span><a name="line.369"></a>
+<span class="sourceLineNo">370</span>  /**<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   * @return true if data blocks should be written to the cache when an HFile is<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   *         written, false if not<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  public boolean shouldCacheDataOnWrite() {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnWrite;<a name="line.375"></a>
+<span class="sourceLineNo">376</span>  }<a name="line.376"></a>
+<span class="sourceLineNo">377</span><a name="line.377"></a>
+<span class="sourceLineNo">378</span>  /**<a name="line.378"></a>
+<span class="sourceLineNo">379</span>   * Only used for testing.<a name="line.379"></a>
+<span class="sourceLineNo">380</span>   * @param cacheDataOnWrite whether data blocks should be written to the cache<a name="line.380"></a>
+<span class="sourceLineNo">381</span>   *                         when an HFile is written<a name="line.381"></a>
+<span class="sourceLineNo">382</span>   */<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  @VisibleForTesting<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  public void setCacheDataOnWrite(boolean cacheDataOnWrite) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  }<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * Only used for testing.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @param cacheDataInL1 Whether to cache data blocks up in l1 (if a multi-tier cache<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   * implementation).<a name="line.391"></a>
+<span class="sourceLineNo">392</span>   */<a name="line.392"></a>
+<span class="sourceLineNo">393</span>  @VisibleForTesting<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  public void setCacheDataInL1(boolean cacheDataInL1) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    this.cacheDataInL1 = cacheDataInL1;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   * @return true if index blocks should be written to the cache when an HFile<a name="line.399"></a>
+<span class="sourceLineNo">400</span>   *         is written, false if not<a name="line.400"></a>
+<span class="sourceLineNo">401</span>   */<a name="line.401"></a>
+<span class="sourceLineNo">402</span>  public boolean shouldCacheIndexesOnWrite() {<a name="line.402"></a>
+<span class="sourceLineNo">403</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheIndexesOnWrite;<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  }<a name="line.404"></a>
+<span class="sourceLineNo">405</span><a name="line.405"></a>
+<span class="sourceLineNo">406</span>  /**<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   * @return true if bloom blocks should be written to the cache when an HFile<a name="line.407"></a>
+<span class="sourceLineNo">408</span>   *         is written, false if not<a name="line.408"></a>
+<span class="sourceLineNo">409</span>   */<a name="line.409"></a>
+<span class="sourceLineNo">410</span>  public boolean shouldCacheBloomsOnWrite() {<a name="line.410"></a>
+<span class="sourceLineNo">411</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheBloomsOnWrite;<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
+<span class="sourceLineNo">413</span><a name="line.413"></a>
+<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * @return true if blocks should be evicted from the cache when an HFile<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   *         reader is closed, false if not<a name="line.416"></a>
+<span class="sourceLineNo">417</span>   */<a name="line.417"></a>
+<span class="sourceLineNo">418</span>  public boolean shouldEvictOnClose() {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    return isBlockCacheEnabled() &amp;&amp; this.evictOnClose;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
+<span class="sourceLineNo">423</span>   * Only used for testing.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>   * @param evictOnClose whether blocks should be evicted from the cache when an<a name="line.424"></a>
+<span class="sourceLineNo">425</span>   *                     HFile reader is closed<a name="line.425"></a>
+<span class="sourceLineNo">426</span>   */<a name="line.426"></a>
+<span class="sourceLineNo">427</span>  public void setEvictOnClose(boolean evictOnClose) {<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    this.evictOnClose = evictOnClose;<a name="line.428"></a>
+<span class="sourceLineNo">429</span>  }<a name="line.429"></a>
+<span class="sourceLineNo">430</span><a name="line.430"></a>
+<span class="sourceLineNo">431</span>  /**<a name="line.431"></a>
+<span class="sourceLineNo">432</span>   * @return true if data blocks should be compressed in the cache, false if not<a name="line.432"></a>
+<span class="sourceLineNo">433</span>   */<a name="line.433"></a>
+<span class="sourceLineNo">434</span>  public boolean shouldCacheDataCompressed() {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataCompressed;<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  }<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>  /**<a name="line.438"></a>
+<span class="sourceLineNo">439</span>   * @return true if this {@link BlockCategory} should be compressed in blockcache, false otherwise<a name="line.439"></a>
+<span class="sourceLineNo">440</span>   */<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  public boolean shouldCacheCompressed(BlockCategory category) {<a name="line.441"></a>
+<span class="sourceLineNo">442</span>    if (!isBlockCacheEnabled()) return false;<a name="line.442"></a>
+<span class="sourceLineNo">443</span>    switch (category) {<a name="line.443"></a>
+<span class="sourceLineNo">444</span>      case DATA:<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        return this.cacheDataCompressed;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>      default:<a name="line.446"></a>
+<span class="sourceLineNo">447</span>        return false;<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    }<a name="line.448"></a>
+<span class="sourceLineNo">449</span>  }<a name="line.449"></a>
+<span class="sourceLineNo">450</span><a name="line.450"></a>
+<span class="sourceLineNo">451</span>  /**<a name="line.451"></a>
+<span class="sourceLineNo">452</span>   * @return true if blocks should be prefetched into the cache on open, false if not<a name="line.452"></a>
+<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  public boolean shouldPrefetchOnOpen() {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>    return isBlockCacheEnabled() &amp;&amp; this.prefetchOnOpen;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>  }<a name="line.456"></a>
+<span class="sourceLineNo">457</span><a name="line.457"></a>
+<span class="sourceLineNo">458</span>  /**<a name="line.458"></a>
+<span class="sourceLineNo">459</span>   * Return true if we may find this type of block in block cache.<a name="line.459"></a>
+<span class="sourceLineNo">460</span>   * &lt;p&gt;<a name="line.460"></a>
+<span class="sourceLineNo">461</span>   * TODO: today {@code family.isBlockCacheEnabled()} only means {@code cacheDataOnRead}, so here we<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   * consider lots of other configurations such as {@code cacheDataOnWrite}. We should fix this in<a name="line.462"></a>
+<span class="sourceLineNo">463</span>   * the future, {@code cacheDataOnWrite} should honor the CF level {@code isBlockCacheEnabled}<a name="line.463"></a>
+<span class="sourceLineNo">464</span>   * configuration.<a name="line.464"></a>
+<span class="sourceLineNo">465</span>   */<a name="line.465"></a>
+<span class="sourceLineNo">466</span>  public boolean shouldReadBlockFromCache(BlockType blockType) {<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    if (!isBlockCacheEnabled()) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>      return false;<a name="line.468"></a>
 <span class="sourceLineNo">469</span>    }<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    if (cacheDataOnWrite) {<a name="line.470"></a>
+<span class="sourceLineNo">470</span>    if (cacheDataOnRead) {<a name="line.470"></a>
 <span class="sourceLineNo">471</span>      return true;<a name="line.471"></a>
 <span class="sourceLineNo">472</span>    }<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    if (blockType == null) {<a name="line.473"></a>
+<span class="sourceLineNo">473</span>    if (prefetchOnOpen) {<a name="line.473"></a>
 <span class="sourceLineNo">474</span>      return true;<a name="line.474"></a>
 <span class="sourceLineNo">475</span>    }<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    if (blockType.getCategory() == BlockCategory.BLOOM ||<a name="line.476"></a>
-<span class="sourceLineNo">477</span>            blockType.getCategory() == BlockCategory.INDEX) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>      return true;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    return false;<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  }<a name="line.481"></a>
-<span class="sourceLineNo">482</span><a name="line.482"></a>
-<span class="sourceLineNo">483</span>  /**<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   * If we make sure the block could not be cached, we will not acquire the lock<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * otherwise we will acquire lock<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   */<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  public boolean shouldLockOnCacheMiss(BlockType blockType) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    if (blockType == null) {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      return true;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    }<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    return shouldCacheBlockOnRead(blockType.getCategory());<a name="line.491"></a>
-<span class="sourceLineNo">492</span>  }<a name="line.492"></a>
-<span class="sourceLineNo">493</span><a name="line.493"></a>
-<span class="sourceLineNo">494</span>  @Override<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public String toString() {<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    if (!isBlockCacheEnabled()) {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      return "CacheConfig:disabled";<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    }<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    return "blockCache=" + getBlockCache() +<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      ", cacheDataOnRead=" + shouldCacheDataOnRead() +<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      ", cacheDataOnWrite=" + shouldCacheDataOnWrite() +<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      ", cacheIndexesOnWrite=" + shouldCacheIndexesOnWrite() +<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      ", cacheBloomsOnWrite=" + shouldCacheBloomsOnWrite() +<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      ", cacheEvictOnClose=" + shouldEvictOnClose() +<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      ", cacheDataCompressed=" + shouldCacheDataCompressed() +<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      ", prefetchOnOpen=" + shouldPrefetchOnOpen();<a name="line.506"></a>
-<span class="sourceLineNo">507</span>  }<a name="line.507"></a>
-<span class="sourceLineNo">508</span><a name="line.508"></a>
-<span class="sourceLineNo">509</span>  // Static block cache reference and methods<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>  /**<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * Static reference to the block cache, or null if no caching should be used<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   * at all.<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   */<a name="line.514"></a>
-<span class="sourceLineNo">515</span>  // Clear this if in tests you'd make more than one block cache instance.<a name="line.515"></a>
-<span class="sourceLineNo">516</span>  @VisibleForTesting<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  static BlockCache GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.517"></a>
-<span class="sourceLineNo">518</span><a name="line.518"></a>
-<span class="sourceLineNo">519</span>  /** Boolean whether we have disabled the block cache entirely. */<a name="line.519"></a>
-<span class="sourceLineNo">520</span>  @VisibleForTesting<a name="line.520"></a>
-<span class="sourceLineNo">521</span>  static boolean blockCacheDisabled = false;<a name="line.521"></a>
-<span class="sourceLineNo">522</span><a name="line.522"></a>
-<span class="sourceLineNo">523</span>  static long getLruCacheSize(final Configuration conf, final MemoryUsage mu) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    float cachePercentage = conf.getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY,<a name="line.524"></a>
-<span class="sourceLineNo">525</span>      HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT);<a name="line.525"></a>
-<span class="sourceLineNo">526</span>    if (cachePercentage &lt;= 0.0001f) {<a name="line.526"></a>
-<span class="sourceLineNo">527</span>      blockCacheDisabled = true;<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      return -1;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    if (cachePercentage &gt; 1.0) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new IllegalArgumentException(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY +<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        " must be between 0.0 and 1.0, and not &gt; 1.0");<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span><a name="line.534"></a>
-<span class="sourceLineNo">535</span>    // Calculate the amount of heap to give the heap.<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    return (long) (mu.getMax() * cachePercentage);<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  }<a name="line.537"></a>
-<span class="sourceLineNo">538</span><a name="line.538"></a>
-<span class="sourceLineNo">539</span>  /**<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @param c Configuration to use.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   * @param mu JMX Memory Bean<a name="line.541"></a>
-<span class="sourceLineNo">542</span>   * @return An L1 instance.  Currently an instance of LruBlockCache.<a name="line.542"></a>
-<span class="sourceLineNo">543</span>   */<a name="line.543"></a>
-<span class="sourceLineNo">544</span>  private static LruBlockCache getL1(final Configuration c, final MemoryUsage mu) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    long lruCacheSize = getLruCacheSize(c, mu);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    if (lruCacheSize &lt; 0) return null;<a name="line.546"></a>
-<span class="sourceLineNo">547</span>    int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    LOG.info("Allocating LruBlockCache size=" +<a name="line.548"></a>
-<span class="sourceLineNo">549</span>      StringUtils.byteDesc(lruCacheSize) + ", blockSize=" + StringUtils.byteDesc(blockSize));<a name="line.549"></a>
-<span class="sourceLineNo">550</span>    return new LruBlockCache(lruCacheSize, blockSize, true, c);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>  }<a name="line.551"></a>
-<span class="sourceLineNo">552</span><a name="line.552"></a>
-<span class="sourceLineNo">553</span>  /**<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * @param c Configuration to use.<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @param mu JMX Memory Bean<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   * @return Returns L2 block cache instance (for now it is BucketCache BlockCache all the time)<a name="line.556"></a>
-<span class="sourceLineNo">557</span>   * or null if not supposed to be a L2.<a name="line.557"></a>
-<span class="sourceLineNo">558</span>   */<a name="line.558"></a>
-<span class="sourceLineNo">559</span>  private static BlockCache getL2(final Configuration c, final MemoryUsage mu) {<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    final boolean useExternal = c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    if (LOG.isDebugEnabled()) {<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      LOG.debug("Trying to use " + (useExternal?" External":" Internal") + " l2 cache");<a name="line.562"></a>
-<span class="sourceLineNo">563</span>    }<a name="line.563"></a>
-<span class="sourceLineNo">564</span><a name="line.564"></a>
-<span class="sourceLineNo">565</span>    // If we want to use an external block cache then create that.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    if (useExternal) {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      return getExternalBlockcache(c);<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>    // otherwise use the bucket cache.<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    return getBucketCache(c, mu);<a name="line.571"></a>
-<span class="sourceLineNo">572</span><a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  private static BlockCache getExternalBlockcache(Configuration c) {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    Class klass = null;<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    // Get the class, from the config. s<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    try {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      klass = ExternalBlockCaches.valueOf(c.get(EXTERNAL_BLOCKCACHE_CLASS_KEY, "memcache")).clazz;<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    } catch (IllegalArgumentException exception) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      try {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        klass = c.getClass(EXTERNAL_BLOCKCACHE_CLASS_KEY, Class.forName(<a name="line.583"></a>
-<span class="sourceLineNo">584</span>            "org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache"));<a name="line.584"></a>
-<span class="sourceLineNo">585</span>      } catch (ClassNotFoundException e) {<a name="line.585"></a>
-<span class="sourceLineNo">586</span>        return null;<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      }<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    }<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>    // Now try and create an instance of the block cache.<a name="line.590"></a>
-<span class="sourceLineNo">5

<TRUNCATED>
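
The getL2()/getExternalBlockcache() code above (from the first, truncated hunk) chooses the L2 cache tier: when the external-blockcache flag is set it instantiates an external cache (a memcached-backed implementation by default), otherwise it falls back to the bucket cache. Below is a minimal, self-contained Java sketch of that selection flow; the configuration key name, the stand-in cache classes, and the getL2 helper shown here are illustrative assumptions, not the actual HBase API.

import java.util.Properties;

// Simplified stand-ins; the real types live in org.apache.hadoop.hbase.io.hfile.
interface BlockCache {}
class MemcachedLikeCache implements BlockCache {}   // external L2 (as MemcachedBlockCache would be)
class BucketLikeCache implements BlockCache {}      // default L2 (as BucketCache would be)

public class L2Selection {
  // Assumed flag name; the source above reads it via the EXTERNAL_BLOCKCACHE_KEY constant.
  static final String EXTERNAL_KEY = "hbase.blockcache.use.external";

  // Mirrors getL2(): prefer the external cache when configured, else fall back to the bucket cache.
  static BlockCache getL2(Properties conf) {
    boolean useExternal = Boolean.parseBoolean(conf.getProperty(EXTERNAL_KEY, "false"));
    if (useExternal) {
      // In the real code, getExternalBlockcache() resolves the concrete class from
      // configuration (defaulting to a memcached-backed cache) and returns null on failure.
      return new MemcachedLikeCache();
    }
    // Otherwise the bucket cache serves as the L2 tier.
    return new BucketLikeCache();
  }

  public static void main(String[] args) {
    Properties conf = new Properties();
    conf.setProperty(EXTERNAL_KEY, "true");
    System.out.println(getL2(conf).getClass().getSimpleName());   // prints MemcachedLikeCache
  }
}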

[02/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
index 9a60dce..fcaf416 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
@@ -34,1938 +34,1994 @@
 <span class="sourceLineNo">026</span>import java.util.concurrent.locks.Lock;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.27"></a>
 <span class="sourceLineNo">028</span><a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.Path;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.Cell;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.HConstants;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.io.ByteArrayOutputStream;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.io.ByteBuffInputStream;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.io.ByteBufferSupportDataOutputStream;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.nio.MultiByteBuff;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.nio.SingleByteBuff;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.ChecksumType;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.io.IOUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>import com.google.common.annotations.VisibleForTesting;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import com.google.common.base.Preconditions;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>/**<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * Reading {@link HFile} version 1 and 2 blocks, and writing version 2 blocks.<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * &lt;ul&gt;<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * &lt;li&gt;In version 1 all blocks are always compressed or uncompressed, as<a name="line.59"></a>
-<span class="sourceLineNo">060</span> * specified by the {@link HFile}'s compression algorithm, with a type-specific<a name="line.60"></a>
-<span class="sourceLineNo">061</span> * magic record stored in the beginning of the compressed data (i.e. one needs<a name="line.61"></a>
-<span class="sourceLineNo">062</span> * to uncompress the compressed block to determine the block type). There is<a name="line.62"></a>
-<span class="sourceLineNo">063</span> * only a single compression algorithm setting for all blocks. Offset and size<a name="line.63"></a>
-<span class="sourceLineNo">064</span> * information from the block index are required to read a block.<a name="line.64"></a>
-<span class="sourceLineNo">065</span> * &lt;li&gt;In version 2 a block is structured as follows:<a name="line.65"></a>
-<span class="sourceLineNo">066</span> * &lt;ul&gt;<a name="line.66"></a>
-<span class="sourceLineNo">067</span> * &lt;li&gt;header (see Writer#finishBlock())<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * &lt;ul&gt;<a name="line.68"></a>
-<span class="sourceLineNo">069</span> * &lt;li&gt;Magic record identifying the block type (8 bytes)<a name="line.69"></a>
-<span class="sourceLineNo">070</span> * &lt;li&gt;Compressed block size, excluding header, including checksum (4 bytes)<a name="line.70"></a>
-<span class="sourceLineNo">071</span> * &lt;li&gt;Uncompressed block size, excluding header, excluding checksum (4 bytes)<a name="line.71"></a>
-<span class="sourceLineNo">072</span> * &lt;li&gt;The offset of the previous block of the same type (8 bytes). This is<a name="line.72"></a>
-<span class="sourceLineNo">073</span> * used to be able to navigate to the previous block without going to the block<a name="line.73"></a>
-<span class="sourceLineNo">074</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the ordinal describing checksum type (1 byte)<a name="line.74"></a>
-<span class="sourceLineNo">075</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the number of data bytes/checksum chunk (4 bytes)<a name="line.75"></a>
-<span class="sourceLineNo">076</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the size of data on disk, including header,<a name="line.76"></a>
-<span class="sourceLineNo">077</span> * excluding checksums (4 bytes)<a name="line.77"></a>
-<span class="sourceLineNo">078</span> * &lt;/ul&gt;<a name="line.78"></a>
-<span class="sourceLineNo">079</span> * &lt;/li&gt;<a name="line.79"></a>
-<span class="sourceLineNo">080</span> * &lt;li&gt;Raw/Compressed/Encrypted/Encoded data. The compression algorithm is the<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * same for all the blocks in the {@link HFile}, similarly to what was done in<a name="line.81"></a>
-<span class="sourceLineNo">082</span> * version 1.<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * &lt;li&gt;For minorVersions &amp;gt;=1, a series of 4 byte checksums, one each for<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * the number of bytes specified by bytesPerChecksum.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * &lt;/ul&gt;<a name="line.85"></a>
-<span class="sourceLineNo">086</span> * &lt;/ul&gt;<a name="line.86"></a>
-<span class="sourceLineNo">087</span> */<a name="line.87"></a>
-<span class="sourceLineNo">088</span>@InterfaceAudience.Private<a name="line.88"></a>
-<span class="sourceLineNo">089</span>public class HFileBlock implements Cacheable {<a name="line.89"></a>
-<span class="sourceLineNo">090</span><a name="line.90"></a>
-<span class="sourceLineNo">091</span>  /**<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * On a checksum failure on a Reader, these many suceeding read<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * requests switch back to using hdfs checksums before auto-reenabling<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   * hbase checksum verification.<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static final boolean FILL_HEADER = true;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final boolean DONT_FILL_HEADER = false;<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  /**<a name="line.101"></a>
-<span class="sourceLineNo">102</span>   * The size of block header when blockType is {@link BlockType#ENCODED_DATA}.<a name="line.102"></a>
-<span class="sourceLineNo">103</span>   * This extends normal header by adding the id of encoder.<a name="line.103"></a>
-<span class="sourceLineNo">104</span>   */<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  public static final int ENCODED_HEADER_SIZE = HConstants.HFILEBLOCK_HEADER_SIZE<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      + DataBlockEncoding.ID_SIZE;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  static final byte[] DUMMY_HEADER_NO_CHECKSUM =<a name="line.108"></a>
-<span class="sourceLineNo">109</span>     new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  // How to get the estimate correctly? if it is a singleBB?<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  public static final int MULTI_BYTE_BUFFER_HEAP_SIZE =<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      (int)ClassSize.estimateBase(MultiByteBuff.class, false);<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>  // meta.usesHBaseChecksum+offset+nextBlockOnDiskSizeWithHeader<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public static final int EXTRA_SERIALIZATION_SPACE = Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      + Bytes.SIZEOF_LONG;<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  /**<a name="line.119"></a>
-<span class="sourceLineNo">120</span>   * Each checksum value is an integer that can be stored in 4 bytes.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  static final CacheableDeserializer&lt;Cacheable&gt; blockDeserializer =<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.126"></a>
-<span class="sourceLineNo">127</span>            throws IOException {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();<a name="line.128"></a>
-<span class="sourceLineNo">129</span>          ByteBuff newByteBuffer;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>          if (reuse) {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>            newByteBuffer = buf.slice();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>          } else {<a name="line.132"></a>
-<span class="sourceLineNo">133</span>            // Used only in tests<a name="line.133"></a>
-<span class="sourceLineNo">134</span>            int len = buf.limit();<a name="line.134"></a>
-<span class="sourceLineNo">135</span>            newByteBuffer = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.135"></a>
-<span class="sourceLineNo">136</span>            newByteBuffer.put(0, buf, buf.position(), len);<a name="line.136"></a>
-<span class="sourceLineNo">137</span>          }<a name="line.137"></a>
-<span class="sourceLineNo">138</span>          buf.position(buf.limit());<a name="line.138"></a>
-<span class="sourceLineNo">139</span>          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>          boolean usesChecksum = buf.get() == (byte)1;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>          HFileBlock hFileBlock = new HFileBlock(newByteBuffer, usesChecksum, memType);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>          hFileBlock.offset = buf.getLong();<a name="line.142"></a>
-<span class="sourceLineNo">143</span>          hFileBlock.nextBlockOnDiskSizeWithHeader = buf.getInt();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>          if (hFileBlock.hasNextBlockHeader()) {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>            hFileBlock.buf.limit(hFileBlock.buf.limit() - hFileBlock.headerSize());<a name="line.145"></a>
-<span class="sourceLineNo">146</span>          }<a name="line.146"></a>
-<span class="sourceLineNo">147</span>          return hFileBlock;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>        @Override<a name="line.150"></a>
-<span class="sourceLineNo">151</span>        public int getDeserialiserIdentifier() {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>          return deserializerIdentifier;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>        @Override<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>          // Used only in tests<a name="line.157"></a>
-<span class="sourceLineNo">158</span>          return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        }<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      };<a name="line.160"></a>
-<span class="sourceLineNo">161</span>  private static final int deserializerIdentifier;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  static {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    deserializerIdentifier = CacheableDeserializerIdManager<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        .registerDeserializer(blockDeserializer);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>  }<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>  /** Type of block. Header field 0. */<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private BlockType blockType;<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>  /** Size on disk excluding header, including checksum. Header field 1. */<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  private int onDiskSizeWithoutHeader;<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /** Size of pure data. Does not include header or checksums. Header field 2. */<a name="line.173"></a>
-<span class="sourceLineNo">174</span>  private final int uncompressedSizeWithoutHeader;<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  /** The offset of the previous block on disk. Header field 3. */<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  private final long prevBlockOffset;<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  /**<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   * Size on disk of header + data. Excludes checksum. Header field 6,<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * OR calculated from {@link #onDiskSizeWithoutHeader} when using HDFS checksum.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   */<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private final int onDiskDataSizeWithHeader;<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /** The in-memory representation of the hfile block */<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private ByteBuff buf;<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span>  /** Meta data that holds meta information on the hfileblock */<a name="line.188"></a>
-<span class="sourceLineNo">189</span>  private HFileContext fileContext;<a name="line.189"></a>
+<span class="sourceLineNo">029</span>import org.apache.commons.logging.Log;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.commons.logging.LogFactory;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.fs.Path;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.Cell;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.HConstants;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.classification.InterfaceAudience;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.io.ByteArrayOutputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.io.ByteBuffInputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.ByteBufferSupportDataOutputStream;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.nio.ByteBuff;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.nio.MultiByteBuff;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.nio.SingleByteBuff;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.ChecksumType;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.io.IOUtils;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>import com.google.common.annotations.VisibleForTesting;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import com.google.common.base.Preconditions;<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>/**<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * Reads {@link HFile} version 1 and version 2 blocks but writes version 2 blocks only.<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * Version 2 was introduced in hbase-0.92.0. Does read and write out to the filesystem but also<a name="line.60"></a>
+<span class="sourceLineNo">061</span> * the read and write to Cache.<a name="line.61"></a>
+<span class="sourceLineNo">062</span> *<a name="line.62"></a>
+<span class="sourceLineNo">063</span> * &lt;h3&gt;HFileBlock: Version 1&lt;/h3&gt;<a name="line.63"></a>
+<span class="sourceLineNo">064</span> * As of this writing, there should be no more version 1 blocks found out in the wild. Version 2<a name="line.64"></a>
+<span class="sourceLineNo">065</span> * as introduced in hbase-0.92.0.<a name="line.65"></a>
+<span class="sourceLineNo">066</span> * In version 1 all blocks are always compressed or uncompressed, as<a name="line.66"></a>
+<span class="sourceLineNo">067</span> * specified by the {@link HFile}'s compression algorithm, with a type-specific<a name="line.67"></a>
+<span class="sourceLineNo">068</span> * magic record stored in the beginning of the compressed data (i.e. one needs<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * to uncompress the compressed block to determine the block type). There is<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * only a single compression algorithm setting for all blocks. Offset and size<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * information from the block index are required to read a block.<a name="line.71"></a>
+<span class="sourceLineNo">072</span> * &lt;h3&gt;HFileBlock: Version 2&lt;/h3&gt;<a name="line.72"></a>
+<span class="sourceLineNo">073</span> * In version 2, a block is structured as follows:<a name="line.73"></a>
+<span class="sourceLineNo">074</span> * &lt;ul&gt;<a name="line.74"></a>
+<span class="sourceLineNo">075</span> * &lt;li&gt;&lt;b&gt;Header:&lt;/b&gt; See Writer#putHeader(); header total size is HFILEBLOCK_HEADER_SIZE)<a name="line.75"></a>
+<span class="sourceLineNo">076</span> * &lt;ul&gt;<a name="line.76"></a>
+<span class="sourceLineNo">077</span> * &lt;li&gt;Magic record identifying the {@link BlockType} (8 bytes): e.g. &lt;code&gt;DATABLK*&lt;/code&gt;<a name="line.77"></a>
+<span class="sourceLineNo">078</span> * &lt;li&gt;Compressed -- a.k.a 'on disk' -- block size, excluding header, but including<a name="line.78"></a>
+<span class="sourceLineNo">079</span> *     tailing checksum bytes (4 bytes)<a name="line.79"></a>
+<span class="sourceLineNo">080</span> * &lt;li&gt;Uncompressed block size, excluding header, and excluding checksum bytes (4 bytes)<a name="line.80"></a>
+<span class="sourceLineNo">081</span> * &lt;li&gt;The offset of the previous block of the same type (8 bytes). This is<a name="line.81"></a>
+<span class="sourceLineNo">082</span> * used to navigate to the previous block without having to go to the block index<a name="line.82"></a>
+<span class="sourceLineNo">083</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the ordinal describing checksum type (1 byte)<a name="line.83"></a>
+<span class="sourceLineNo">084</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the number of data bytes/checksum chunk (4 bytes)<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * &lt;li&gt;For minorVersions &amp;gt;=1, the size of data 'on disk', including header,<a name="line.85"></a>
+<span class="sourceLineNo">086</span> * excluding checksums (4 bytes)<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * &lt;/ul&gt;<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * &lt;/li&gt;<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * &lt;li&gt;&lt;b&gt;Raw/Compressed/Encrypted/Encoded data:&lt;/b&gt; The compression algorithm is the<a name="line.89"></a>
+<span class="sourceLineNo">090</span> * same for all the blocks in the {@link HFile}, similarly to what was done in<a name="line.90"></a>
+<span class="sourceLineNo">091</span> * version 1. If compression is NONE, this is just raw, serialized Cells.<a name="line.91"></a>
+<span class="sourceLineNo">092</span> * &lt;li&gt;&lt;b&gt;Tail:&lt;/b&gt; For minorVersions &amp;gt;=1, a series of 4 byte checksums, one each for<a name="line.92"></a>
+<span class="sourceLineNo">093</span> * the number of bytes specified by bytesPerChecksum.<a name="line.93"></a>
+<span class="sourceLineNo">094</span> * &lt;/ul&gt;<a name="line.94"></a>
+<span class="sourceLineNo">095</span> * &lt;p&gt;Be aware that when we read from HDFS, we overread pulling in the next blocks' header too.<a name="line.95"></a>
+<span class="sourceLineNo">096</span> * We do this to save having to do two seeks to read an HFileBlock; a seek to read the header<a name="line.96"></a>
+<span class="sourceLineNo">097</span> * to figure lengths, etc., and then another seek to pull in the data.<a name="line.97"></a>
+<span class="sourceLineNo">098</span> */<a name="line.98"></a>
+<span class="sourceLineNo">099</span>@InterfaceAudience.Private<a name="line.99"></a>
+<span class="sourceLineNo">100</span>public class HFileBlock implements Cacheable {<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final Log LOG = LogFactory.getLog(HFileBlock.class);<a name="line.101"></a>
+<span class="sourceLineNo">102</span><a name="line.102"></a>
+<span class="sourceLineNo">103</span>  /**<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   * On a checksum failure, do these many succeeding read requests using hdfs checksums before<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * auto-reenabling hbase checksum verification.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   */<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;<a name="line.107"></a>
+<span class="sourceLineNo">108</span><a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static int UNSET = -1;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  public static final boolean FILL_HEADER = true;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  public static final boolean DONT_FILL_HEADER = false;<a name="line.111"></a>
+<span class="sourceLineNo">112</span><a name="line.112"></a>
+<span class="sourceLineNo">113</span>  // How to get the estimate correctly? if it is a singleBB?<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  public static final int MULTI_BYTE_BUFFER_HEAP_SIZE =<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      (int)ClassSize.estimateBase(MultiByteBuff.class, false);<a name="line.115"></a>
+<span class="sourceLineNo">116</span><a name="line.116"></a>
+<span class="sourceLineNo">117</span>  /**<a name="line.117"></a>
+<span class="sourceLineNo">118</span>   * See #blockDeserializer method for more info.<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * 13 bytes of extra stuff stuck on the end of the HFileBlock that we pull in from HDFS (note,<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * when we read from HDFS, we pull in an HFileBlock AND the header of the next block if one).<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   * The 13 bytes are: usesHBaseChecksum (1 byte) + offset of this block (long) +<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * nextBlockOnDiskSizeWithHeader (int).<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   */<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  public static final int EXTRA_SERIALIZATION_SPACE =<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG;<a name="line.125"></a>
+<span class="sourceLineNo">126</span><a name="line.126"></a>
+<span class="sourceLineNo">127</span>  /**<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   * Each checksum value is an integer that can be stored in 4 bytes.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   */<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  static final byte[] DUMMY_HEADER_NO_CHECKSUM =<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  /**<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * Used deserializing blocks from Cache.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   *<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * Serializing to cache is a little hard to follow. See Writer#finishBlock for where it is done.<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   * When we start to append to a new HFileBlock,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>   * we skip over where the header should go before we start adding Cells. When the block is<a name="line.140"></a>
+<span class="sourceLineNo">141</span>   * done, we'll then go back and fill in the header and the checksum tail. Be aware that what<a name="line.141"></a>
+<span class="sourceLineNo">142</span>   * gets serialized into the blockcache is a byte array that contains an HFileBlock followed by<a name="line.142"></a>
+<span class="sourceLineNo">143</span>   * its checksums and then the header of the next HFileBlock (needed to help navigate), followed<a name="line.143"></a>
+<span class="sourceLineNo">144</span>   * again by an extra 13 bytes of meta info needed when time to recreate the HFileBlock from cache.<a name="line.144"></a>
+<span class="sourceLineNo">145</span>   *<a name="line.145"></a>
+<span class="sourceLineNo">146</span>   * ++++++++++++++<a name="line.146"></a>
+<span class="sourceLineNo">147</span>   * + HFileBlock +<a name="line.147"></a>
+<span class="sourceLineNo">148</span>   * ++++++++++++++<a name="line.148"></a>
+<span class="sourceLineNo">149</span>   * + Checksums  +<a name="line.149"></a>
+<span class="sourceLineNo">150</span>   * ++++++++++++++<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   * + NextHeader +<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   * ++++++++++++++<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * + ExtraMeta! +<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   * ++++++++++++++<a name="line.154"></a>
+<span class="sourceLineNo">155</span>   *<a name="line.155"></a>
+<span class="sourceLineNo">156</span>   * TODO: Fix it so we do NOT put the NextHeader into blockcache. It is not necessary.<a name="line.156"></a>
+<span class="sourceLineNo">157</span>   */<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  static final CacheableDeserializer&lt;Cacheable&gt; blockDeserializer =<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      new CacheableDeserializer&lt;Cacheable&gt;() {<a name="line.159"></a>
+<span class="sourceLineNo">160</span>        public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)<a name="line.160"></a>
+<span class="sourceLineNo">161</span>        throws IOException {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>          // Rewind to just before the EXTRA_SERIALIZATION_SPACE.<a name="line.162"></a>
+<span class="sourceLineNo">163</span>          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>          // Get a new buffer to pass the deserialized HFileBlock for it to 'own'.<a name="line.164"></a>
+<span class="sourceLineNo">165</span>          ByteBuff newByteBuffer;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          if (reuse) {<a name="line.166"></a>
+<span class="sourceLineNo">167</span>            newByteBuffer = buf.slice();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          } else {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>            int len = buf.limit();<a name="line.169"></a>
+<span class="sourceLineNo">170</span>            newByteBuffer = new SingleByteBuff(ByteBuffer.allocate(len));<a name="line.170"></a>
+<span class="sourceLineNo">171</span>            newByteBuffer.put(0, buf, buf.position(), len);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>          }<a name="line.172"></a>
+<span class="sourceLineNo">173</span>          // Read out the EXTRA_SERIALIZATION_SPACE content and shove into our HFileBlock.<a name="line.173"></a>
+<span class="sourceLineNo">174</span>          buf.position(buf.limit());<a name="line.174"></a>
+<span class="sourceLineNo">175</span>          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);<a name="line.175"></a>
+<span class="sourceLineNo">176</span>          boolean usesChecksum = buf.get() == (byte)1;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>          HFileBlock hFileBlock = new HFileBlock(newByteBuffer, usesChecksum, memType);<a name="line.177"></a>
+<span class="sourceLineNo">178</span>          hFileBlock.offset = buf.getLong();<a name="line.178"></a>
+<span class="sourceLineNo">179</span>          hFileBlock.nextBlockOnDiskSizeWithHeader = buf.getInt();<a name="line.179"></a>
+<span class="sourceLineNo">180</span>          if (hFileBlock.hasNextBlockHeader()) {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>            hFileBlock.buf.limit(hFileBlock.buf.limit() - hFileBlock.headerSize());<a name="line.181"></a>
+<span class="sourceLineNo">182</span>          }<a name="line.182"></a>
+<span class="sourceLineNo">183</span>          return hFileBlock;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>        @Override<a name="line.186"></a>
+<span class="sourceLineNo">187</span>        public int getDeserialiserIdentifier() {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>          return deserializerIdentifier;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>        }<a name="line.189"></a>
 <span class="sourceLineNo">190</span><a name="line.190"></a>
-<span class="sourceLineNo">191</span>  /**<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * The offset of this block in the file. Populated by the reader for<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * convenience of access. This offset is not part of the block header.<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  private long offset = -1;<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * The on-disk size of the next block, including the header, obtained by<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * peeking into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the next block's<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * header, or -1 if unknown.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  private int nextBlockOnDiskSizeWithHeader = -1;<a name="line.202"></a>
+<span class="sourceLineNo">191</span>        @Override<a name="line.191"></a>
+<span class="sourceLineNo">192</span>        public HFileBlock deserialize(ByteBuff b) throws IOException {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>          // Used only in tests<a name="line.193"></a>
+<span class="sourceLineNo">194</span>          return deserialize(b, false, MemoryType.EXCLUSIVE);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>        }<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      };<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>  private static final int deserializerIdentifier;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  static {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    deserializerIdentifier = CacheableDeserializerIdManager<a name="line.200"></a>
+<span class="sourceLineNo">201</span>        .registerDeserializer(blockDeserializer);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
 <span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private MemoryType memType = MemoryType.EXCLUSIVE;<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   * is mostly used when the block data has already been read and uncompressed,<a name="line.208"></a>
-<span class="sourceLineNo">209</span>   * and is sitting in a byte buffer.<a name="line.209"></a>
-<span class="sourceLineNo">210</span>   *<a name="line.210"></a>
-<span class="sourceLineNo">211</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.211"></a>
-<span class="sourceLineNo">212</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.212"></a>
-<span class="sourceLineNo">213</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.213"></a>
-<span class="sourceLineNo">214</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.214"></a>
-<span class="sourceLineNo">215</span>   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes) followed by<a name="line.215"></a>
-<span class="sourceLineNo">216</span>   *          uncompressed data. This<a name="line.216"></a>
-<span class="sourceLineNo">217</span>   * @param fillHeader when true, parse {@code buf} and override the first 4 header fields.<a name="line.217"></a>
-<span class="sourceLineNo">218</span>   * @param offset the file offset the block was read from<a name="line.218"></a>
-<span class="sourceLineNo">219</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.219"></a>
-<span class="sourceLineNo">220</span>   * @param fileContext HFile meta data<a name="line.220"></a>
-<span class="sourceLineNo">221</span>   */<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      long prevBlockOffset, ByteBuff buf, boolean fillHeader, long offset,<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    this.blockType = blockType;<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    this.buf = buf;<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    this.offset = offset;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    this.fileContext = fileContext;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    if (fillHeader)<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      overwriteHeader();<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    this.buf.rewind();<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  }<a name="line.236"></a>
+<span class="sourceLineNo">204</span>  /** Type of block. Header field 0. */<a name="line.204"></a>
+<span class="sourceLineNo">205</span>  private BlockType blockType;<a name="line.205"></a>
+<span class="sourceLineNo">206</span><a name="line.206"></a>
+<span class="sourceLineNo">207</span>  /**<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   * Size on disk excluding header, including checksum. Header field 1.<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   */<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  private int onDiskSizeWithoutHeader;<a name="line.211"></a>
+<span class="sourceLineNo">212</span><a name="line.212"></a>
+<span class="sourceLineNo">213</span>  /**<a name="line.213"></a>
+<span class="sourceLineNo">214</span>   * Size of pure data. Does not include header or checksums. Header field 2.<a name="line.214"></a>
+<span class="sourceLineNo">215</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.215"></a>
+<span class="sourceLineNo">216</span>   */<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  private final int uncompressedSizeWithoutHeader;<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>  /**<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   * The offset of the previous block on disk. Header field 3.<a name="line.220"></a>
+<span class="sourceLineNo">221</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   */<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  private final long prevBlockOffset;<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>  /**<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   * Size on disk of header + data. Excludes checksum. Header field 6,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>   * OR calculated from {@link #onDiskSizeWithoutHeader} when using HDFS checksum.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>   * @see Writer#putHeader(byte[], int, int, int, int)<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private final int onDiskDataSizeWithHeader;<a name="line.230"></a>
+<span class="sourceLineNo">231</span><a name="line.231"></a>
+<span class="sourceLineNo">232</span>  /** The in-memory representation of the hfile block */<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  private ByteBuff buf;<a name="line.233"></a>
+<span class="sourceLineNo">234</span><a name="line.234"></a>
+<span class="sourceLineNo">235</span>  /** Meta data that holds meta information on the hfileblock */<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private HFileContext fileContext;<a name="line.236"></a>
 <span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    this(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, prevBlockOffset,<a name="line.241"></a>
-<span class="sourceLineNo">242</span>        new SingleByteBuff(buf), fillHeader, offset, onDiskDataSizeWithHeader, fileContext);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  }<a name="line.243"></a>
-<span class="sourceLineNo">244</span><a name="line.244"></a>
-<span class="sourceLineNo">245</span>  /**<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.246"></a>
-<span class="sourceLineNo">247</span>   */<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  HFileBlock(HFileBlock that) {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    this.blockType = that.blockType;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    this.prevBlockOffset = that.prevBlockOffset;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    this.buf = that.buf.duplicate();<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    this.offset = that.offset;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    this.fileContext = that.fileContext;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
-<span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    this(new SingleByteBuff(b), usesHBaseChecksum);<a name="line.261"></a>
-<span class="sourceLineNo">262</span>  }<a name="line.262"></a>
-<span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>  /**<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * to that point.<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   */<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum) throws IOException {<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    this(b, usesHBaseChecksum, MemoryType.EXCLUSIVE);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  }<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>  /**<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * to that point.<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum, MemoryType memType) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    b.rewind();<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    blockType = BlockType.read(b);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    onDiskSizeWithoutHeader = b.getInt();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    uncompressedSizeWithoutHeader = b.getInt();<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    prevBlockOffset = b.getLong();<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    HFileContextBuilder contextBuilder = new HFileContextBuilder();<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    if (usesHBaseChecksum) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      contextBuilder.withBytesPerCheckSum(b.getInt());<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      this.onDiskDataSizeWithHeader = b.getInt();<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    } else {<a name="line.292"></a>
-<span class="sourceLineNo">293</span>      contextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      contextBuilder.withBytesPerCheckSum(0);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      this.onDiskDataSizeWithHeader = onDiskSizeWithoutHeader +<a name="line.295"></a>
-<span class="sourceLineNo">296</span>                                       HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    this.fileContext = contextBuilder.build();<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    this.memType = memType;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    buf = b;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    buf.rewind();<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
-<span class="sourceLineNo">303</span><a name="line.303"></a>
-<span class="sourceLineNo">304</span>  public BlockType getBlockType() {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    return blockType;<a name="line.305"></a>
+<span class="sourceLineNo">238</span>  /**<a name="line.238"></a>
+<span class="sourceLineNo">239</span>   * The offset of this block in the file. Populated by the reader for<a name="line.239"></a>
+<span class="sourceLineNo">240</span>   * convenience of access. This offset is not part of the block header.<a name="line.240"></a>
+<span class="sourceLineNo">241</span>   */<a name="line.241"></a>
+<span class="sourceLineNo">242</span>  private long offset = UNSET;<a name="line.242"></a>
+<span class="sourceLineNo">243</span><a name="line.243"></a>
+<span class="sourceLineNo">244</span>  /**<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * The on-disk size of the next block, including the header, obtained by<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   * peeking into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the next block's<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   * header, or -1 if unknown.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>   */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  private int nextBlockOnDiskSizeWithHeader = UNSET;<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private MemoryType memType = MemoryType.EXCLUSIVE;<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>  /**<a name="line.253"></a>
+<span class="sourceLineNo">254</span>   * Creates a new {@link HFile} block from the given fields. This constructor<a name="line.254"></a>
+<span class="sourceLineNo">255</span>   * is used when the block data has already been read and uncompressed,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>   * and is sitting in a byte buffer.<a name="line.256"></a>
+<span class="sourceLineNo">257</span>   *<a name="line.257"></a>
+<span class="sourceLineNo">258</span>   * @param blockType the type of this block, see {@link BlockType}<a name="line.258"></a>
+<span class="sourceLineNo">259</span>   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}<a name="line.259"></a>
+<span class="sourceLineNo">260</span>   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}<a name="line.260"></a>
+<span class="sourceLineNo">261</span>   * @param prevBlockOffset see {@link #prevBlockOffset}<a name="line.261"></a>
+<span class="sourceLineNo">262</span>   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes) followed by<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   *          uncompressed data.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>   * @param fillHeader when true, write the first 4 header fields into passed buffer.<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * @param offset the file offset the block was read from<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}<a name="line.266"></a>
+<span class="sourceLineNo">267</span>   * @param fileContext HFile meta data<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   */<a name="line.268"></a>
+<span class="sourceLineNo">269</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      long prevBlockOffset, ByteBuff buf, boolean fillHeader, long offset,<a name="line.270"></a>
+<span class="sourceLineNo">271</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    this.blockType = blockType;<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    this.prevBlockOffset = prevBlockOffset;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    this.buf = buf;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    this.offset = offset;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    this.fileContext = fileContext;<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    if (fillHeader) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      overwriteHeader();<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    this.buf.rewind();<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      int onDiskDataSizeWithHeader, HFileContext fileContext) {<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    this(blockType, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, prevBlockOffset,<a name="line.289"></a>
+<span class="sourceLineNo">290</span>        new SingleByteBuff(buf), fillHeader, offset, onDiskDataSizeWithHeader, fileContext);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>  /**<a name="line.293"></a>
+<span class="sourceLineNo">294</span>   * Copy constructor. Creates a shallow copy of {@code that}'s buffer.<a name="line.294"></a>
+<span class="sourceLineNo">295</span>   */<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  HFileBlock(HFileBlock that) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    this.blockType = that.blockType;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;<a name="line.298"></a>
+<span class="sourceLineNo">299</span>    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    this.prevBlockOffset = that.prevBlockOffset;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    this.buf = that.buf.duplicate();<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    this.offset = that.offset;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    this.fileContext = that.fileContext;<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;<a name="line.305"></a>
 <span class="sourceLineNo">306</span>  }<a name="line.306"></a>
 <span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  public short getDataBlockEncodingId() {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.311"></a>
-<span class="sourceLineNo">312</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    return buf.getShort(headerSize());<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  }<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>  /**<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @return the on-disk size of header + data part + checksum.<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   */<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  public int getOnDiskSizeWithHeader() {<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    return onDiskSizeWithoutHeader + headerSize();<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>  /**<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * @return the on-disk size of the data part + checksum (header excluded).<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   */<a name="line.326"></a>
-<span class="sourceLineNo">327</span>  public int getOnDiskSizeWithoutHeader() {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    return onDiskSizeWithoutHeader;<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  }<a name="line.329"></a>
-<span class="sourceLineNo">330</span><a name="line.330"></a>
-<span class="sourceLineNo">331</span>  /**<a name="line.331"></a>
-<span class="sourceLineNo">332</span>   * @return the uncompressed size of data part (header and checksum excluded).<a name="line.332"></a>
-<span class="sourceLineNo">333</span>   */<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   public int getUncompressedSizeWithoutHeader() {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    return uncompressedSizeWithoutHeader;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>  }<a name="line.336"></a>
-<span class="sourceLineNo">337</span><a name="line.337"></a>
-<span class="sourceLineNo">338</span>  /**<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * @return the offset of the previous block of the same type in the file, or<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   *         -1 if unknown<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public long getPrevBlockOffset() {<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    return prevBlockOffset;<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * Rewinds {@code buf} and writes first 4 header fields. {@code buf} position<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   * is modified as side-effect.<a name="line.348"></a>
-<span class="sourceLineNo">349</span>   */<a name="line.349"></a>
-<span class="sourceLineNo">350</span>  private void overwriteHeader() {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    buf.rewind();<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    blockType.write(buf);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    buf.putInt(onDiskSizeWithoutHeader);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    buf.putInt(uncompressedSizeWithoutHeader);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    buf.putLong(prevBlockOffset);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      buf.put(fileContext.getChecksumType().getCode());<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      buf.putInt(fileContext.getBytesPerChecksum());<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      buf.putInt(onDiskDataSizeWithHeader);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span>  }<a name="line.361"></a>
-<span class="sourceLineNo">362</span><a name="line.362"></a>
-<span class="sourceLineNo">363</span>  /**<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * Returns a buffer that does not include the header or checksum.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   *<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   * @return the buffer with header skipped and checksum omitted.<a name="line.366"></a>
+<span class="sourceLineNo">308</span>  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    this(new SingleByteBuff(b), usesHBaseChecksum);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   * to that point.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum) throws IOException {<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    this(b, usesHBaseChecksum, MemoryType.EXCLUSIVE);<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  }<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * Creates a block from an existing buffer starting with a header. Rewinds<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * and takes ownership of the buffer. By definition of rewind, ignores the<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   * buffer position, but if you slice the buffer beforehand, it will rewind<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * to that point.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   */<a name="line.327"></a>
+<span class="sourceLineNo">328</span>  HFileBlock(ByteBuff b, boolean usesHBaseChecksum, MemoryType memType) throws IOException {<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    b.rewind();<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    blockType = BlockType.read(b);<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    onDiskSizeWithoutHeader = b.getInt();<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    uncompressedSizeWithoutHeader = b.getInt();<a name="line.332"></a>
+<span class="sourceLineNo">333</span>    prevBlockOffset = b.getLong();<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    HFileContextBuilder contextBuilder = new HFileContextBuilder();<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    if (usesHBaseChecksum) {<a name="line.336"></a>
+<span class="sourceLineNo">337</span>      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));<a name="line.337"></a>
+<span class="sourceLineNo">338</span>      contextBuilder.withBytesPerCheckSum(b.getInt());<a name="line.338"></a>
+<span class="sourceLineNo">339</span>      this.onDiskDataSizeWithHeader = b.getInt();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    } else {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      contextBuilder.withChecksumType(ChecksumType.NULL);<a name="line.341"></a>
+<span class="sourceLineNo">342</span>      contextBuilder.withBytesPerCheckSum(0);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      this.onDiskDataSizeWithHeader =<a name="line.343"></a>
+<span class="sourceLineNo">344</span>          onDiskSizeWithoutHeader + HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    }<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    this.fileContext = contextBuilder.build();<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    this.memType = memType;<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    buf = b;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    buf.rewind();<a name="line.349"></a>
+<span class="sourceLineNo">350</span>  }<a name="line.350"></a>
+<span class="sourceLineNo">351</span><a name="line.351"></a>
+<span class="sourceLineNo">352</span>  public BlockType getBlockType() {<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    return blockType;<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  }<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>  /** @return get data block encoding id that was used to encode this block */<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  public short getDataBlockEncodingId() {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    if (blockType != BlockType.ENCODED_DATA) {<a name="line.358"></a>
+<span class="sourceLineNo">359</span>      throw new IllegalArgumentException("Querying encoder ID of a block " +<a name="line.359"></a>
+<span class="sourceLineNo">360</span>          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    }<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return buf.getShort(headerSize());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return the on-disk size of header + data part + checksum.<a name="line.366"></a>
 <span class="sourceLineNo">367</span>   */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public ByteBuff getBufferWithoutHeader() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ByteBuff dup = this.buf.duplicate();<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    dup.position(headerSize());<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    dup.limit(buf.limit() - totalChecksumBytes());<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    return dup.slice();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>  }<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>  /**<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   * Returns the buffer this block stores internally. The clients must not<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * modify the buffer object. This method has to be public because it is used<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * in {@link CompoundBloomFilter} to avoid object creation on every Bloom<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   * filter lookup, but has to be used with caution. Checksum data is not<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * included in the returned buffer but header data is.<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   *<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * @return the buffer of this block for read-only operations<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   */<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  public ByteBuff getBufferReadOnly() {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    ByteBuff dup = this.buf.duplicate();<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    dup.limit(buf.limit() - totalChecksumBytes());<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    return dup.slice();<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>  /**<a name="line.390"></a>
-<span class="sourceLineNo">391</span>   * Returns the buffer of this block, including header data. The clients must<a name="line.391"></a>
-<span class="sourceLineNo">392</span>   * not modify the buffer object. This method has to be public because it is<a name="line.392"></a>
-<span class="sourceLineNo">393</span>   * used in {@link org.apache.hadoop.hbase.io.hfile.bucket.BucketCache} to avoid buffer copy.<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   *<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   * @return the buffer with header and checksum included for read-only operations<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   */<a name="line.396"></a>
-<span class="sourceLineNo">397</span>  public ByteBuff getBufferReadOnlyWithHeader() {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    ByteBuff dup = this.buf.duplicate();<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    return dup.slice();<a name="line.399"></a>
-<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>  /**<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   * Returns a byte buffer of this block, including header data and checksum, positioned at<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * the beginning of header. The underlying data array is not copied.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   *<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @return the byte buffer with header and checksum included<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  ByteBuff getBufferWithHeader() {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    ByteBuff dupBuf = buf.duplicate();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    dupBuf.rewind();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    return dupBuf;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      String fieldName) throws IOException {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    if (valueFromBuf != valueFromField) {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf<a name="line.417"></a>
-<span class="sourceLineNo">418</span>          + ") is different from that in the field (" + valueFromField + ")");<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    }<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)<a name="line.422"></a>
-<span class="sourceLineNo">423</span>      throws IOException {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (valueFromBuf != valueFromField) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      throw new IOException("Block type stored in the buffer: " +<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        valueFromBuf + ", block type field: " + valueFromField);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * Checks if the block is internally consistent, i.e. the first<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * valid header consistent with the fields. Assumes a packed block structure.<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   * This function is primary for testing and debugging, and is not<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * thread-safe, because it alters the internal buffer pointer.<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   */<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  void sanityCheck() throws IOException {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    buf.rewind();<a name="line.438"></a>
-<span class="sourceLineNo">439</span><a name="line.439"></a>
-<span class="sourceLineNo">440</span>    sanityCheckAssertion(BlockType.read(buf), blockType);<a name="line.440"></a>
-<span class="sourceLineNo">441</span><a name="line.441"></a>
-<span class="sourceLineNo">442</span>    sanityCheckAssertion(buf.getInt(), onDiskSizeWithoutHeader,<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        "onDiskSizeWithoutHeader");<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>    sanityCheckAssertion(buf.getInt(), uncompressedSizeWithoutHeader,<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        "uncompressedSizeWithoutHeader");<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>    sanityCheckAssertion(buf.getLong(), prevBlockOffset, "prevBlocKOffset");<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    if (this.fileContext.isUseHBaseChecksum()) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      sanityCheckAssertion(buf.get(), this.fileContext.getChecksumType().getCode(), "checksumType");<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      sanityCheckAssertion(buf.getInt(), this.fileContext.getBytesPerChecksum(),<a name="line.451"></a>
-<span class="sourceLineNo">452</span>          "bytesPerChecksum");<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      sanityCheckAssertion(buf.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    }<a name="line.454"></a>
-<span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>    int cksumBytes = totalChecksumBytes();<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    if (buf.limit() != expectedBufLimit) {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>      throw new AssertionError("Expected buffer limit " + expectedBufLimit<a name="line.459"></a>
-<span class="sourceLineNo">460</span>          + ", got " + buf.limit());<a name="line.460"></a>
-<span class="sourceLineNo">461</span>    }<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    // block's header, so there are two sensible values for buffer capacity.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    int hdrSize = headerSize();<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    if (buf.capacity() != expectedBufLimit &amp;&amp;<a name="line.466"></a>
-<span class="sourceLineNo">467</span>        buf.capacity() != expectedBufLimit + hdrSize) {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>      throw new AssertionError("Invalid buffer capacity: " + buf.capacity() +<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    }<a name="line.470"></a>
-<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
-<span class="sourceLineNo">472</span><a name="line.472"></a>
-<span class="sourceLineNo">473</span>  @Override<a name="line.473"></a>
-<span class="sourceLineNo">474</span>  public String toString() {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    StringBuilder sb = new StringBuilder()<a name="line.475"></a>
-<span class="sourceLineNo">476</span>      .append("HFileBlock [")<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      .append(" fileOffset=").append(offset)<a name="line.477"></a>
-<span class="sourceLineNo">478</span>      .append(" headerSize()=").append(headerSize())<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      .append(" blockType=").append(blockType)<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      .append(" onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)<a name="line.480"></a>
-<span class="sourceLineNo">481</span>      .append(" uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      .append(" prevBlockOffset=").append(prevBlockOffset)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      .append(" isUseHBaseChecksum()=").append(fileContext.isUseHBaseChecksum());<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    if (fileContext.isUseHBaseChecksum()) {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      sb.append(" checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        .append(" bytesPerChecksum=").append(this.buf.getInt(24 + 1))<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        .append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    } else {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      sb.append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        .append("(").append(onDiskSizeWithoutHeader)<a name="line.490"></a>
-<span class="sourceLineNo">491</span>        .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    }<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    String dataBegin = null;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    if (buf.hasArray()) {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    } else {<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      ByteBuff bufWithoutHeader = getBufferWithoutHeader();<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      byte[] dataBeginBytes = new byte[Math.min(32,<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          bufWithoutHeader.limit() - bufWithoutHeader.position())];<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      bufWithoutHeader.get(dataBeginBytes);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    }<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    sb.append(" getOnDiskSizeWithHeader()=").append(getOnDiskSizeWithHeader())<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      .append(" totalChecksumBytes()=").append(totalChecksumBytes())<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      .append(" isUnpacked()=").append(isUnpacked())<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      .append(" buf=[ ").append(buf).append(" ]")<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      .append(" dataBeginsWith=").append(dataBegin)<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      .append(" fileContext=").append(fileContext)<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      .append(" ]");<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    return sb.toString();<a name="line.511"></a>
-<span class="sourceLineNo">512</span>  }<a name="line.512"></a>
-<span class="sourceLineNo">513</span><a name="line.513"></a>
-<span class="sourceLineNo">514</span>  /**<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * Called after reading a block with provided onDiskSizeWithHeader.<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   */<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  private void validateOnDiskSizeWithoutHeader(int expectedOnDiskSizeWithoutHeader)<a name="line.517"></a>
-<span class="sourceLineNo">518</span>  throws IOException {<a name="line.518"></a>
-<span class="sourceLineNo">519</span>    if (onDiskSizeWithoutHeader != expectedOnDiskSizeWithoutHeader) {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      String dataBegin = null;<a name="line.520"></a>
-<span class="sourceLineNo">521</span>      if (buf.hasArray()) {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>        dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset(), Math.min(32, buf.limit()));<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      } else {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        ByteBuff bufDup = getBufferReadOnly();<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        byte[] dataBeginBytes = new byte[Math.min(32, bufDup.limit() - bufDup.position())];<a name="line.525"></a>
-<span class="sourceLineNo">526</span>        bufDup.get(dataBeginBytes);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        dataBegin = Bytes.toStringBinary(dataBeginBytes);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      }<a name="line.528"></a>
-<span class="sourceLineNo">529</span>      String blockInfoMsg =<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        "Block offset: " + offset + ", data starts with: " + dataBegin;<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      throw new IOException("On-disk size without header provided is "<a name="line.531"></a>
-<span class="sourceLineNo">532</span>          + expectedOnDiskSizeWithoutHeader + ", but block "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>          + "header contains " + onDiskSizeWithoutHeader + ". " +<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          blockInfoMsg);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  }<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>  /**<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * encoded structure. Internal structures are shared between instances where applicable.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    if (!fileContext.isCompressedOrEncrypted()) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      // which is used for block serialization to L2 cache, does not preserve encoding and<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      // encryption details.<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      return this;<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span><a name="line.549"></a>
-<span class="sourceLineNo">550</span>    HFileBlock unpacked = new HFileBlock(this);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>    unpacked.allocateBuffer(); // allocates space for the decompressed block<a name="line.551"></a>
-<span class="sourceLineNo">552</span><a name="line.552"></a>
-<span class="sourceLineNo">553</span>    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?<a name="line.553"></a>
-<span class="sourceLineNo">554</spa

<TRUNCATED>
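Editor's note on the hunk above: it replaces HFileBlock's header-parsing constructor with one that reads from a ByteBuff. For readers skimming the diff, below is a minimal, standalone Java sketch of the on-disk header fields that constructor consumes, in the same order. The class name, the size constants, and the values in main() are illustrative assumptions for this note only; the real logic lives in org.apache.hadoop.hbase.io.hfile.HFileBlock and HConstants.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

/** Illustrative sketch only: mirrors the field order read by HFileBlock(ByteBuff, boolean). */
public final class HFileBlockHeaderSketch {

  // Assumed sizes for this sketch: 8-byte block-type magic, 24-byte header without
  // checksum fields, 33 bytes with them.
  static final int MAGIC_LENGTH = 8;
  static final int HEADER_SIZE_NO_CHECKSUM = 24;
  static final int HEADER_SIZE_WITH_CHECKSUM = 33;

  final byte[] blockTypeMagic = new byte[MAGIC_LENGTH]; // e.g. "DATABLK*" for a data block
  final int onDiskSizeWithoutHeader;        // compressed data + checksums, header excluded
  final int uncompressedSizeWithoutHeader;  // data only, header and checksums excluded
  final long prevBlockOffset;               // previous block of the same type, or -1
  final byte checksumType;                  // the next three fields exist only with HBase checksums
  final int bytesPerChecksum;
  final int onDiskDataSizeWithHeader;

  HFileBlockHeaderSketch(ByteBuffer b, boolean usesHBaseChecksum) {
    b.rewind();                             // header starts at position 0, as in the constructor above
    b.get(blockTypeMagic);                  // BlockType.read(b) in the real code
    onDiskSizeWithoutHeader = b.getInt();
    uncompressedSizeWithoutHeader = b.getInt();
    prevBlockOffset = b.getLong();
    if (usesHBaseChecksum) {
      checksumType = b.get();
      bytesPerChecksum = b.getInt();
      onDiskDataSizeWithHeader = b.getInt();
    } else {
      // Older, pre-checksum layout: no checksum fields on disk, so derive the data size.
      checksumType = 0;
      bytesPerChecksum = 0;
      onDiskDataSizeWithHeader = onDiskSizeWithoutHeader + HEADER_SIZE_NO_CHECKSUM;
    }
  }

  public static void main(String[] args) {
    // Arbitrary example values, laid out in header order.
    ByteBuffer b = ByteBuffer.allocate(HEADER_SIZE_WITH_CHECKSUM);
    b.put("DATABLK*".getBytes(StandardCharsets.US_ASCII));
    b.putInt(4096).putInt(8192).putLong(-1L);
    b.put((byte) 1).putInt(16384).putInt(4096 + HEADER_SIZE_WITH_CHECKSUM);
    HFileBlockHeaderSketch h = new HFileBlockHeaderSketch(b, true);
    System.out.println("onDiskSizeWithoutHeader=" + h.onDiskSizeWithoutHeader
        + " uncompressedSizeWithoutHeader=" + h.uncompressedSizeWithoutHeader
        + " prevBlockOffset=" + h.prevBlockOffset);
  }
}

The removed getBufferWithoutHeader()/getBufferReadOnly() lines earlier in the hunk expose the block payload by trimming the header from the front and the checksum bytes from the tail. A plain-ByteBuffer version of that slicing is sketched below; the four-bytes-per-chunk checksum arithmetic is an assumption for illustration only, since the real code delegates to totalChecksumBytes().

import java.nio.ByteBuffer;

/** Illustrative sketch of the trimming done by getBufferWithoutHeader(). */
final class BlockPayloadView {

  /** Assumed: one 4-byte checksum per bytesPerChecksum-sized chunk of header + data. */
  static int totalChecksumBytes(int onDiskDataSizeWithHeader, int bytesPerChecksum) {
    int chunks = (onDiskDataSizeWithHeader + bytesPerChecksum - 1) / bytesPerChecksum;
    return chunks * 4;
  }

  /** Read-only view of the payload: header skipped, trailing checksums dropped. */
  static ByteBuffer withoutHeaderAndChecksum(ByteBuffer block, int headerSize,
      int onDiskDataSizeWithHeader, int bytesPerChecksum) {
    ByteBuffer dup = block.duplicate();     // leave the caller's position/limit untouched
    dup.position(headerSize);
    dup.limit(block.limit() - totalChecksumBytes(onDiskDataSizeWithHeader, bytesPerChecksum));
    return dup.slice().asReadOnlyBuffer();  // position 0 is the first payload byte
  }
}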

[51/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/3e48e84d
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/3e48e84d
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/3e48e84d

Branch: refs/heads/asf-site
Commit: 3e48e84d3979584361144761d76f12b7497fb090
Parents: 4ce8323
Author: jenkins <bu...@apache.org>
Authored: Fri Mar 4 15:32:45 2016 +0000
Committer: Misty Stanley-Jones <ms...@cloudera.com>
Committed: Fri Mar 4 09:50:06 2016 -0800

----------------------------------------------------------------------
 acid-semantics.html                             |     4 +-
 apache_hbase_reference_guide.pdf                |     4 +-
 apache_hbase_reference_guide.pdfmarks           |     4 +-
 apidocs/org/apache/hadoop/hbase/HConstants.html |   598 +-
 .../hbase/filter/CompareFilter.CompareOp.html   |     4 +-
 .../hbase/mapreduce/TableInputFormatBase.html   |    36 +-
 .../org/apache/hadoop/hbase/HConstants.html     |  2356 +--
 .../hbase/mapreduce/TableInputFormatBase.html   |  1245 +-
 book.html                                       |     2 +-
 bulk-loads.html                                 |     4 +-
 checkstyle-aggregate.html                       | 15431 ++++++++---------
 checkstyle.rss                                  |    16 +-
 coc.html                                        |     4 +-
 cygwin.html                                     |     4 +-
 dependencies.html                               |     4 +-
 dependency-convergence.html                     |     4 +-
 dependency-info.html                            |     4 +-
 dependency-management.html                      |     4 +-
 devapidocs/constant-values.html                 |    18 +-
 devapidocs/index-all.html                       |    76 +-
 .../apache/hadoop/hbase/HConstants.Modify.html  |    18 +-
 .../hbase/HConstants.OperationStatusCode.html   |    16 +-
 .../org/apache/hadoop/hbase/HConstants.html     |   600 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |     2 +-
 .../class-use/InterfaceAudience.Private.html    |     4 +-
 .../class-use/InterfaceStability.Unstable.html  |    12 +-
 .../hbase/classification/package-tree.html      |     6 +-
 .../hadoop/hbase/client/package-tree.html       |     4 +-
 .../PrefixTreeSeeker.OffheapPrefixTreeCell.html |    96 +-
 .../PrefixTreeSeeker.OnheapPrefixTreeCell.html  |    76 +-
 .../codec/prefixtree/PrefixTreeSeeker.html      |    38 +-
 .../hadoop/hbase/filter/package-tree.html       |    10 +-
 .../hadoop/hbase/io/class-use/HeapSize.html     |     4 +-
 ...kEncoder.BufferedDataBlockEncodingState.html |     6 +-
 ...dDataBlockEncoder.BufferedEncodedSeeker.html |    58 +-
 ...eredDataBlockEncoder.OffheapDecodedCell.html |   100 +-
 ...feredDataBlockEncoder.OnheapDecodedCell.html |    82 +-
 .../BufferedDataBlockEncoder.SeekerState.html   |    56 +-
 .../io/encoding/BufferedDataBlockEncoder.html   |    37 +-
 .../DataBlockEncoder.EncodedSeeker.html         |    18 +-
 .../hbase/io/encoding/DataBlockEncoder.html     |    16 +-
 .../hfile/CacheConfig.ExternalBlockCaches.html  |    10 +-
 .../hadoop/hbase/io/hfile/CacheConfig.html      |   146 +-
 .../io/hfile/HFile.CachingBlockReader.html      |     6 +-
 .../hadoop/hbase/io/hfile/HFile.FileInfo.html   |    74 +-
 .../hadoop/hbase/io/hfile/HFile.Reader.html     |    64 +-
 .../hadoop/hbase/io/hfile/HFile.Writer.html     |    20 +-
 .../hbase/io/hfile/HFile.WriterFactory.html     |    36 +-
 .../org/apache/hadoop/hbase/io/hfile/HFile.html |    52 +-
 .../io/hfile/HFileBlock.BlockIterator.html      |     6 +-
 .../io/hfile/HFileBlock.BlockWritable.html      |     6 +-
 .../hbase/io/hfile/HFileBlock.FSReader.html     |    16 +-
 .../hbase/io/hfile/HFileBlock.FSReaderImpl.html |    54 +-
 .../io/hfile/HFileBlock.PrefetchedHeader.html   |    52 +-
 .../hbase/io/hfile/HFileBlock.Writer.State.html |    12 +-
 .../hbase/io/hfile/HFileBlock.Writer.html       |   102 +-
 .../hadoop/hbase/io/hfile/HFileBlock.html       |   445 +-
 .../hfile/HFileReaderImpl.EncodedScanner.html   |    42 +-
 .../hfile/HFileReaderImpl.HFileScannerImpl.html |     6 +-
 .../hadoop/hbase/io/hfile/HFileReaderImpl.html  |    36 +-
 .../hadoop/hbase/io/hfile/HFileScanner.html     |    40 +-
 .../hadoop/hbase/io/hfile/HFileWriterImpl.html  |   164 +-
 .../bucket/BucketAllocator.IndexStatistics.html |    30 +-
 .../hbase/io/hfile/bucket/BucketAllocator.html  |    57 +-
 .../hfile/bucket/BucketCache.BucketEntry.html   |    42 +-
 .../bucket/BucketCache.BucketEntryGroup.html    |    22 +-
 .../hfile/bucket/BucketCache.RAMQueueEntry.html |    20 +-
 .../bucket/BucketCache.StatisticsThread.html    |     8 +-
 .../hfile/bucket/BucketCache.WriterThread.html  |    14 +-
 .../hbase/io/hfile/bucket/BucketCache.html      |    76 +-
 .../hbase/io/hfile/bucket/package-summary.html  |     2 +-
 .../hbase/io/hfile/bucket/package-use.html      |     2 +-
 .../hbase/io/hfile/class-use/BlockCache.html    |     2 +-
 .../hbase/io/hfile/class-use/BlockType.html     |    24 +-
 .../hbase/io/hfile/class-use/CacheConfig.html   |     2 +-
 .../hbase/io/hfile/class-use/Cacheable.html     |     6 +-
 .../hfile/class-use/CacheableDeserializer.html  |     4 +-
 .../class-use/HFileBlock.BlockWritable.html     |     2 +-
 .../io/hfile/class-use/HFileBlock.Writer.html   |     2 +-
 .../hbase/io/hfile/class-use/HFileBlock.html    |     2 +-
 .../hbase/io/hfile/class-use/HFileContext.html  |     2 +-
 .../hadoop/hbase/io/hfile/package-summary.html  |     2 +-
 .../hadoop/hbase/io/hfile/package-tree.html     |     7 +-
 .../hadoop/hbase/io/hfile/package-use.html      |     2 +-
 .../hbase/mapreduce/TableInputFormatBase.html   |    64 +-
 .../SplitLogManager.ResubmitDirective.html      |    10 +-
 .../hbase/master/SplitLogManager.Task.html      |    32 +-
 .../hbase/master/SplitLogManager.TaskBatch.html |    14 +-
 .../SplitLogManager.TerminationStatus.html      |    18 +-
 .../master/SplitLogManager.TimeoutMonitor.html  |     8 +-
 .../hadoop/hbase/master/SplitLogManager.html    |    76 +-
 .../hadoop/hbase/master/package-tree.html       |     4 +-
 .../hbase/master/procedure/package-tree.html    |     2 +-
 .../hadoop/hbase/nio/class-use/ByteBuff.html    |     2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |    10 +-
 .../hadoop/hbase/procedure2/package-tree.html   |     2 +-
 .../hadoop/hbase/quotas/package-tree.html       |     6 +-
 .../regionserver/HStore.StoreFlusherImpl.html   |    28 +-
 .../hadoop/hbase/regionserver/HStore.html       |   108 +-
 .../hbase/regionserver/RegionSplitPolicy.html   |     6 +-
 .../hadoop/hbase/regionserver/package-tree.html |    22 +-
 .../CompactionThroughputControllerFactory.html  |     8 +-
 ...BulkLoadEndpoint.SecureBulkLoadListener.html |    22 +-
 .../security/access/SecureBulkLoadEndpoint.html |    10 +-
 .../hbase/security/access/package-tree.html     |     4 +-
 .../hadoop/hbase/security/package-tree.html     |     4 +-
 .../hadoop/hbase/security/token/TokenUtil.html  |    26 +-
 .../hadoop/hbase/thrift/package-tree.html       |     2 +-
 .../tmpl/master/MasterStatusTmpl.ImplData.html  |   240 +-
 .../hbase/tmpl/master/MasterStatusTmpl.html     |    96 +-
 .../hbase/tmpl/master/MasterStatusTmplImpl.html |    48 +-
 .../regionserver/RSStatusTmpl.ImplData.html     |   120 +-
 .../hbase/tmpl/regionserver/RSStatusTmpl.html   |    48 +-
 .../tmpl/regionserver/RSStatusTmplImpl.html     |    24 +-
 .../hadoop/hbase/util/class-use/Counter.html    |     4 +-
 .../apache/hadoop/hbase/util/package-tree.html  |    10 +-
 .../apache/hadoop/hbase/wal/package-tree.html   |     2 +-
 devapidocs/overview-tree.html                   |     4 +-
 .../apache/hadoop/hbase/HConstants.Modify.html  |  2356 +--
 .../hbase/HConstants.OperationStatusCode.html   |  2356 +--
 .../org/apache/hadoop/hbase/HConstants.html     |  2356 +--
 .../PrefixTreeSeeker.OffheapPrefixTreeCell.html |  1113 +-
 .../PrefixTreeSeeker.OnheapPrefixTreeCell.html  |  1113 +-
 .../codec/prefixtree/PrefixTreeSeeker.html      |  1113 +-
 ...kEncoder.BufferedDataBlockEncodingState.html |  2228 +--
 ...dDataBlockEncoder.BufferedEncodedSeeker.html |  2228 +--
 ...eredDataBlockEncoder.OffheapDecodedCell.html |  2228 +--
 ...feredDataBlockEncoder.OnheapDecodedCell.html |  2228 +--
 .../BufferedDataBlockEncoder.SeekerState.html   |  2228 +--
 .../io/encoding/BufferedDataBlockEncoder.html   |  2228 +--
 .../DataBlockEncoder.EncodedSeeker.html         |   313 +-
 .../hbase/io/encoding/DataBlockEncoder.html     |   313 +-
 .../hfile/CacheConfig.ExternalBlockCaches.html  |  1184 +-
 .../hadoop/hbase/io/hfile/CacheConfig.html      |  1184 +-
 .../io/hfile/HFile.CachingBlockReader.html      |  1467 +-
 .../hadoop/hbase/io/hfile/HFile.FileInfo.html   |  1467 +-
 .../hadoop/hbase/io/hfile/HFile.Reader.html     |  1467 +-
 .../hadoop/hbase/io/hfile/HFile.Writer.html     |  1467 +-
 .../hbase/io/hfile/HFile.WriterFactory.html     |  1467 +-
 .../org/apache/hadoop/hbase/io/hfile/HFile.html |  1467 +-
 .../io/hfile/HFileBlock.BlockIterator.html      |  3870 +++--
 .../io/hfile/HFileBlock.BlockWritable.html      |  3870 +++--
 .../hbase/io/hfile/HFileBlock.FSReader.html     |  3870 +++--
 .../hbase/io/hfile/HFileBlock.FSReaderImpl.html |  3870 +++--
 .../io/hfile/HFileBlock.PrefetchedHeader.html   |  3870 +++--
 .../hbase/io/hfile/HFileBlock.Writer.State.html |  3870 +++--
 .../hbase/io/hfile/HFileBlock.Writer.html       |  3870 +++--
 .../hadoop/hbase/io/hfile/HFileBlock.html       |  3870 +++--
 .../hadoop/hbase/io/hfile/HFileContext.html     |    26 +-
 ...ReaderImpl.BlockIndexNotLoadedException.html |   815 +-
 .../hfile/HFileReaderImpl.EncodedScanner.html   |   815 +-
 ...FileScannerImpl.ShareableMemoryKeyValue.html |   815 +-
 ...annerImpl.ShareableMemoryNoTagsKeyValue.html |   815 +-
 ...nnerImpl.ShareableMemoryOffheapKeyValue.html |   815 +-
 .../hfile/HFileReaderImpl.HFileScannerImpl.html |   815 +-
 .../HFileReaderImpl.NotSeekedException.html     |   815 +-
 .../hadoop/hbase/io/hfile/HFileReaderImpl.html  |   815 +-
 .../hadoop/hbase/io/hfile/HFileScanner.html     |   275 +-
 .../hadoop/hbase/io/hfile/HFileWriterImpl.html  |  1403 +-
 .../io/hfile/bucket/BucketAllocator.Bucket.html |   571 +-
 .../bucket/BucketAllocator.BucketSizeInfo.html  |   571 +-
 .../bucket/BucketAllocator.IndexStatistics.html |   571 +-
 .../hbase/io/hfile/bucket/BucketAllocator.html  |   571 +-
 .../hfile/bucket/BucketCache.BucketEntry.html   |  2205 +--
 .../bucket/BucketCache.BucketEntryGroup.html    |  2205 +--
 .../hfile/bucket/BucketCache.RAMQueueEntry.html |  2205 +--
 .../bucket/BucketCache.StatisticsThread.html    |  2205 +--
 .../hfile/bucket/BucketCache.WriterThread.html  |  2205 +--
 .../hbase/io/hfile/bucket/BucketCache.html      |  2205 +--
 .../hbase/mapreduce/TableInputFormatBase.html   |  1245 +-
 .../SplitLogManager.ResubmitDirective.html      |  1535 +-
 .../hbase/master/SplitLogManager.Task.html      |  1535 +-
 .../hbase/master/SplitLogManager.TaskBatch.html |  1535 +-
 .../SplitLogManager.TerminationStatus.html      |  1535 +-
 .../master/SplitLogManager.TimeoutMonitor.html  |  1535 +-
 .../hadoop/hbase/master/SplitLogManager.html    |  1535 +-
 .../regionserver/HStore.StoreFlusherImpl.html   |  1279 +-
 .../hadoop/hbase/regionserver/HStore.html       |  1279 +-
 .../hbase/regionserver/RegionSplitPolicy.html   |   121 +-
 .../CompactionThroughputControllerFactory.html  |    94 +-
 ...BulkLoadEndpoint.SecureBulkLoadListener.html |   338 +-
 .../security/access/SecureBulkLoadEndpoint.html |   338 +-
 .../hadoop/hbase/security/token/TokenUtil.html  |   517 +-
 .../tmpl/master/MasterStatusTmpl.ImplData.html  |   240 +-
 .../tmpl/master/MasterStatusTmpl.Intf.html      |   240 +-
 .../hbase/tmpl/master/MasterStatusTmpl.html     |   240 +-
 .../hbase/tmpl/master/MasterStatusTmplImpl.html |    64 +-
 .../regionserver/RSStatusTmpl.ImplData.html     |   120 +-
 .../tmpl/regionserver/RSStatusTmpl.Intf.html    |   120 +-
 .../hbase/tmpl/regionserver/RSStatusTmpl.html   |   120 +-
 .../tmpl/regionserver/RSStatusTmplImpl.html     |    32 +-
 distribution-management.html                    |     4 +-
 export_control.html                             |     4 +-
 hbase-annotations/checkstyle.html               |     6 +-
 hbase-annotations/dependencies.html             |     6 +-
 hbase-annotations/dependency-convergence.html   |     6 +-
 hbase-annotations/dependency-info.html          |     6 +-
 hbase-annotations/dependency-management.html    |     6 +-
 hbase-annotations/distribution-management.html  |     6 +-
 hbase-annotations/index.html                    |     6 +-
 hbase-annotations/integration.html              |     6 +-
 hbase-annotations/issue-tracking.html           |     6 +-
 hbase-annotations/license.html                  |     6 +-
 hbase-annotations/mail-lists.html               |     6 +-
 hbase-annotations/plugin-management.html        |     6 +-
 hbase-annotations/plugins.html                  |     6 +-
 hbase-annotations/project-info.html             |     6 +-
 hbase-annotations/project-reports.html          |     6 +-
 hbase-annotations/project-summary.html          |     6 +-
 hbase-annotations/source-repository.html        |     6 +-
 hbase-annotations/team-list.html                |     6 +-
 hbase-archetypes/dependencies.html              |     6 +-
 hbase-archetypes/dependency-convergence.html    |     6 +-
 hbase-archetypes/dependency-info.html           |     6 +-
 hbase-archetypes/dependency-management.html     |     6 +-
 hbase-archetypes/distribution-management.html   |     6 +-
 .../hbase-archetype-builder/dependencies.html   |     6 +-
 .../dependency-convergence.html                 |     6 +-
 .../dependency-info.html                        |     6 +-
 .../dependency-management.html                  |     6 +-
 .../distribution-management.html                |     6 +-
 .../hbase-archetype-builder/index.html          |     6 +-
 .../hbase-archetype-builder/integration.html    |     6 +-
 .../hbase-archetype-builder/issue-tracking.html |     6 +-
 .../hbase-archetype-builder/license.html        |     6 +-
 .../hbase-archetype-builder/mail-lists.html     |     6 +-
 .../plugin-management.html                      |     6 +-
 .../hbase-archetype-builder/plugins.html        |     6 +-
 .../hbase-archetype-builder/project-info.html   |     6 +-
 .../project-summary.html                        |     6 +-
 .../source-repository.html                      |     6 +-
 .../hbase-archetype-builder/team-list.html      |     6 +-
 .../hbase-client-project/checkstyle.html        |     6 +-
 .../hbase-client-project/dependencies.html      |     6 +-
 .../dependency-convergence.html                 |     6 +-
 .../hbase-client-project/dependency-info.html   |     6 +-
 .../dependency-management.html                  |     6 +-
 .../distribution-management.html                |     6 +-
 .../hbase-client-project/index.html             |     6 +-
 .../hbase-client-project/integration.html       |     6 +-
 .../hbase-client-project/issue-tracking.html    |     6 +-
 .../hbase-client-project/license.html           |     6 +-
 .../hbase-client-project/mail-lists.html        |     6 +-
 .../hbase-client-project/plugin-management.html |     6 +-
 .../hbase-client-project/plugins.html           |     6 +-
 .../hbase-client-project/project-info.html      |     6 +-
 .../hbase-client-project/project-reports.html   |     6 +-
 .../hbase-client-project/project-summary.html   |     6 +-
 .../hbase-client-project/source-repository.html |     6 +-
 .../hbase-client-project/team-list.html         |     6 +-
 .../hbase-shaded-client-project/checkstyle.html |     6 +-
 .../dependencies.html                           |     6 +-
 .../dependency-convergence.html                 |     6 +-
 .../dependency-info.html                        |     6 +-
 .../dependency-management.html                  |     6 +-
 .../distribution-management.html                |     6 +-
 .../hbase-shaded-client-project/index.html      |     6 +-
 .../integration.html                            |     6 +-
 .../issue-tracking.html                         |     6 +-
 .../hbase-shaded-client-project/license.html    |     6 +-
 .../hbase-shaded-client-project/mail-lists.html |     6 +-
 .../plugin-management.html                      |     6 +-
 .../hbase-shaded-client-project/plugins.html    |     6 +-
 .../project-info.html                           |     6 +-
 .../project-reports.html                        |     6 +-
 .../project-summary.html                        |     6 +-
 .../source-repository.html                      |     6 +-
 .../hbase-shaded-client-project/team-list.html  |     6 +-
 hbase-archetypes/index.html                     |     6 +-
 hbase-archetypes/integration.html               |     6 +-
 hbase-archetypes/issue-tracking.html            |     6 +-
 hbase-archetypes/license.html                   |     6 +-
 hbase-archetypes/mail-lists.html                |     6 +-
 hbase-archetypes/modules.html                   |     6 +-
 hbase-archetypes/plugin-management.html         |     6 +-
 hbase-archetypes/plugins.html                   |     6 +-
 hbase-archetypes/project-info.html              |     6 +-
 hbase-archetypes/project-summary.html           |     6 +-
 hbase-archetypes/source-repository.html         |     6 +-
 hbase-archetypes/team-list.html                 |     6 +-
 hbase-spark/checkstyle.html                     |     6 +-
 hbase-spark/dependencies.html                   |     6 +-
 hbase-spark/dependency-convergence.html         |     6 +-
 hbase-spark/dependency-info.html                |     6 +-
 hbase-spark/dependency-management.html          |     6 +-
 hbase-spark/distribution-management.html        |     6 +-
 hbase-spark/index.html                          |     6 +-
 hbase-spark/integration.html                    |     6 +-
 hbase-spark/issue-tracking.html                 |     6 +-
 hbase-spark/license.html                        |     6 +-
 hbase-spark/mail-lists.html                     |     6 +-
 hbase-spark/plugin-management.html              |     6 +-
 hbase-spark/plugins.html                        |     6 +-
 hbase-spark/project-info.html                   |     6 +-
 hbase-spark/project-reports.html                |     6 +-
 hbase-spark/project-summary.html                |     6 +-
 hbase-spark/source-repository.html              |     6 +-
 hbase-spark/team-list.html                      |     6 +-
 index.html                                      |     4 +-
 integration.html                                |     4 +-
 issue-tracking.html                             |     4 +-
 license.html                                    |     4 +-
 mail-lists.html                                 |     4 +-
 metrics.html                                    |     4 +-
 modules.html                                    |     4 +-
 old_news.html                                   |     4 +-
 plugin-management.html                          |     4 +-
 plugins.html                                    |     4 +-
 poweredbyhbase.html                             |     4 +-
 project-info.html                               |     4 +-
 project-reports.html                            |     4 +-
 project-summary.html                            |     4 +-
 pseudo-distributed.html                         |     4 +-
 replication.html                                |     4 +-
 resources.html                                  |     4 +-
 source-repository.html                          |     4 +-
 sponsors.html                                   |     4 +-
 supportingprojects.html                         |     4 +-
 team-list.html                                  |     4 +-
 testdevapidocs/allclasses-frame.html            |     1 +
 testdevapidocs/allclasses-noframe.html          |     1 +
 testdevapidocs/constant-values.html             |    40 +
 testdevapidocs/index-all.html                   |    54 +
 .../hbase/CategoryBasedTimeout.Builder.html     |     6 +-
 .../hadoop/hbase/CategoryBasedTimeout.html      |    10 +-
 .../org/apache/hadoop/hbase/TestNamespace.html  |    44 +-
 .../hbase/class-use/HBaseTestingUtility.html    |    20 +-
 .../client/TestHTableMultiplexerFlushCache.html |    32 +-
 .../TestMobRestoreSnapshotFromClient.html       |    18 +-
 .../hadoop/hbase/client/TestMultiParallel.html  |    72 +-
 .../TestMasterObserver.CPMasterObserver.html    |   542 +-
 .../hbase/coprocessor/TestMasterObserver.html   |    50 +-
 .../apache/hadoop/hbase/io/TestHFileLink.html   |     8 +-
 .../hbase/io/encoding/TestChangingEncoding.html |    60 +-
 .../io/encoding/TestDataBlockEncoders.html      |    65 +-
 .../encoding/TestSeekToBlockWithEncoders.html   |    13 +
 .../io/hfile/TestChecksum.FSReaderImplTest.html |    34 +-
 .../apache/hadoop/hbase/io/hfile/TestHFile.html |     4 +-
 .../io/hfile/TestHFileBackedByBucketCache.html  |   522 +
 .../hadoop/hbase/io/hfile/TestHFileBlock.html   |     4 +-
 ...estHFileBlockCompatibility.Writer.State.html |    12 +-
 .../TestHFileBlockCompatibility.Writer.html     |   236 +-
 .../class-use/TestHFileBackedByBucketCache.html |   115 +
 .../hadoop/hbase/io/hfile/package-frame.html    |     1 +
 .../hadoop/hbase/io/hfile/package-summary.html  |    52 +-
 .../hadoop/hbase/io/hfile/package-tree.html     |     9 +-
 ...stTableInputFormat.ExampleDeprecatedTIF.html |     6 +-
 ...leInputFormat.ExampleJobConfigurableTIF.html |     8 +-
 .../mapred/TestTableInputFormat.ExampleTIF.html |     8 +-
 .../TestTableInputFormat.ExampleVerifier.html   |    10 +-
 .../hbase/mapred/TestTableInputFormat.html      |    48 +-
 ...tionTestBulkLoad.CompositeKeyComparator.html |     6 +-
 .../IntegrationTestBulkLoad.EmptySplit.html     |    12 +-
 ...tegrationTestBulkLoad.FixedRecordReader.html |    22 +-
 ...ationTestBulkLoad.ITBulkLoadInputFormat.html |     8 +-
 .../IntegrationTestBulkLoad.LinkChain.html      |    20 +-
 .../IntegrationTestBulkLoad.LinkKey.html        |    20 +-
 ...onTestBulkLoad.LinkedListCheckingMapper.html |     6 +-
 ...nTestBulkLoad.LinkedListCheckingReducer.html |     8 +-
 ...onTestBulkLoad.LinkedListCreationMapper.html |    10 +-
 ...stBulkLoad.NaturalKeyGroupingComparator.html |     6 +-
 ...ationTestBulkLoad.NaturalKeyPartitioner.html |     6 +-
 ...nTestBulkLoad.SlowMeCoproScanOperations.html |    18 +-
 .../mapreduce/IntegrationTestBulkLoad.html      |    72 +-
 ...eOutputFormat2.RandomKVGeneratingMapper.html |    22 +-
 .../hbase/mapreduce/TestHFileOutputFormat2.html |    84 +-
 .../master/TestMetricsMasterSourceFactory.html  |     6 +-
 .../TestBaseLoadBalancer.MockBalancer.html      |     8 +-
 .../master/balancer/TestBaseLoadBalancer.html   |    40 +-
 .../procedure/TestMasterProcedureEvents.html    |    24 +-
 ...ocedureScheduler.TestNamespaceProcedure.html |    14 +-
 ...sterProcedureScheduler.TestTableProcSet.html |    18 +-
 ...erProcedureScheduler.TestTableProcedure.html |    14 +-
 .../procedure/TestMasterProcedureScheduler.html |    28 +-
 .../procedure/TestWALProcedureStoreOnHDFS.html  |    26 +-
 .../hbase/metrics/TestBaseSourceImpl.html       |    18 +-
 .../org/apache/hadoop/hbase/package-tree.html   |    12 +-
 .../TestProcedureMember.EmptySubprocedure.html  |     4 +-
 .../hbase/procedure/TestProcedureMember.html    |    50 +-
 .../TestYieldProcedures.TestProcEnv.html        |     8 +-
 ...TestStateMachineProcedure.ExecutionInfo.html |    16 +-
 ...cedures.TestStateMachineProcedure.State.html |    12 +-
 ...eldProcedures.TestStateMachineProcedure.html |    30 +-
 .../hbase/procedure2/TestYieldProcedures.html   |    30 +-
 .../store/TestProcedureStoreTracker.html        |    20 +-
 .../wal/TestWALProcedureStore.LoadCounter.html  |    34 +-
 ...LProcedureStore.TestSequentialProcedure.html |    16 +-
 .../store/wal/TestWALProcedureStore.html        |    54 +-
 .../hadoop/hbase/quotas/TestQuotaState.html     |    24 +-
 .../regionserver/TestCompactSplitThread.html    |    14 +-
 ...tEndToEndSplitTransaction.RegionChecker.html |    24 +-
 ...EndToEndSplitTransaction.RegionSplitter.html |    22 +-
 .../TestEndToEndSplitTransaction.html           |    32 +-
 .../hbase/regionserver/TestQueryMatcher.html    |    52 +-
 ...rRetriableFailure.FaultyScannerObserver.html |     8 +-
 .../TestScannerRetriableFailure.html            |    30 +-
 .../hbase/regionserver/TestStoreFile.html       |    72 +-
 .../hadoop/hbase/regionserver/package-tree.html |     2 +-
 .../regionserver/wal/SequenceFileLogWriter.html |    32 +-
 ...stCustomWALCellCodec.CustomWALCellCodec.html |     8 +-
 .../wal/TestCustomWALCellCodec.html             |     6 +-
 .../regionserver/wal/TestLogRollPeriod.html     |    20 +-
 .../rest/model/TestNamespacesInstanceModel.html |    22 +-
 .../hbase/security/TestEncryptionUtil.html      |    10 +-
 .../TestAccessController.BulkLoadHelper.html    |    16 +-
 .../TestAccessController.PingCoprocessor.html   |    20 +-
 ...tAccessController.TestTableDDLProcedure.html |    22 +-
 .../security/access/TestAccessController.html   |   204 +-
 .../MobSnapshotTestingUtils.SnapshotMock.html   |     6 +-
 .../hbase/snapshot/MobSnapshotTestingUtils.html |    12 +-
 .../apache/hadoop/hbase/test/package-tree.html  |     4 +-
 .../hbase/thrift/TestThriftHttpServer.html      |    38 +-
 ...ultiThreadedAction.DefaultDataGenerator.html |    26 +-
 .../MultiThreadedAction.ProgressReporter.html   |    10 +-
 .../hadoop/hbase/util/MultiThreadedAction.html  |    66 +-
 ...UpdaterThreadWithACL.MutateAccessAction.html |    36 +-
 ...pdaterWithACL.HBaseUpdaterThreadWithACL.html |    18 +-
 .../hbase/util/MultiThreadedUpdaterWithACL.html |    18 +-
 .../hadoop/hbase/util/TestHBaseFsckTwoRS.html   |    28 +-
 .../util/hbck/OfflineMetaRebuildTestCore.html   |    46 +-
 .../apache/hadoop/hbase/wal/package-tree.html   |     4 +-
 .../hadoop/hbase/zookeeper/TestZKConfig.html    |    16 +-
 .../hadoop/hbase/zookeeper/TestZKUtil.html      |     6 +-
 testdevapidocs/overview-tree.html               |     7 +-
 .../hbase/CategoryBasedTimeout.Builder.html     |    95 +-
 .../hadoop/hbase/CategoryBasedTimeout.html      |    95 +-
 .../org/apache/hadoop/hbase/TestNamespace.html  |   739 +-
 .../client/TestHTableMultiplexerFlushCache.html |   277 +-
 .../TestMobRestoreSnapshotFromClient.html       |   118 +-
 .../hadoop/hbase/client/TestMultiParallel.html  |  1381 +-
 .../TestMasterObserver.CPMasterObserver.html    |  3659 ++--
 .../hbase/coprocessor/TestMasterObserver.html   |  3659 ++--
 .../apache/hadoop/hbase/io/TestHFileLink.html   |   201 +-
 .../hbase/io/encoding/TestChangingEncoding.html |   439 +-
 .../io/encoding/TestDataBlockEncoders.html      |   741 +-
 .../encoding/TestSeekToBlockWithEncoders.html   |     4 +-
 .../io/hfile/TestChecksum.FSReaderImplTest.html |    12 +-
 .../hadoop/hbase/io/hfile/TestChecksum.html     |    12 +-
 .../io/hfile/TestForceCacheImportantBlocks.html |     6 +-
 .../io/hfile/TestHFileBackedByBucketCache.html  |   303 +
 .../hfile/TestHFileBlock.BlockReaderThread.html |     2 +-
 .../hadoop/hbase/io/hfile/TestHFileBlock.html   |     2 +-
 ...estHFileBlockCompatibility.Writer.State.html |   803 +-
 .../TestHFileBlockCompatibility.Writer.html     |   803 +-
 .../io/hfile/TestHFileBlockCompatibility.html   |   803 +-
 ...stTableInputFormat.ExampleDeprecatedTIF.html |   800 +-
 ...leInputFormat.ExampleJobConfigurableTIF.html |   800 +-
 .../mapred/TestTableInputFormat.ExampleTIF.html |   800 +-
 .../TestTableInputFormat.ExampleVerifier.html   |   800 +-
 .../hbase/mapred/TestTableInputFormat.html      |   800 +-
 ...tionTestBulkLoad.CompositeKeyComparator.html |  1421 +-
 .../IntegrationTestBulkLoad.EmptySplit.html     |  1421 +-
 ...tegrationTestBulkLoad.FixedRecordReader.html |  1421 +-
 ...ationTestBulkLoad.ITBulkLoadInputFormat.html |  1421 +-
 .../IntegrationTestBulkLoad.LinkChain.html      |  1421 +-
 .../IntegrationTestBulkLoad.LinkKey.html        |  1421 +-
 ...onTestBulkLoad.LinkedListCheckingMapper.html |  1421 +-
 ...nTestBulkLoad.LinkedListCheckingReducer.html |  1421 +-
 ...onTestBulkLoad.LinkedListCreationMapper.html |  1421 +-
 ...stBulkLoad.NaturalKeyGroupingComparator.html |  1421 +-
 ...ationTestBulkLoad.NaturalKeyPartitioner.html |  1421 +-
 ...nTestBulkLoad.SlowMeCoproScanOperations.html |  1421 +-
 .../mapreduce/IntegrationTestBulkLoad.html      |  1421 +-
 ...eOutputFormat2.RandomKVGeneratingMapper.html |  2116 ++-
 .../hbase/mapreduce/TestHFileOutputFormat2.html |  2116 ++-
 .../master/TestMetricsMasterSourceFactory.html  |    37 +-
 .../TestBaseLoadBalancer.MockBalancer.html      |   946 +-
 .../master/balancer/TestBaseLoadBalancer.html   |   946 +-
 .../procedure/TestMasterProcedureEvents.html    |   306 +-
 ...ocedureScheduler.TestNamespaceProcedure.html |  1030 +-
 ...sterProcedureScheduler.TestTableProcSet.html |  1030 +-
 ...erProcedureScheduler.TestTableProcedure.html |  1030 +-
 .../procedure/TestMasterProcedureScheduler.html |  1030 +-
 .../procedure/TestWALProcedureStoreOnHDFS.html  |   352 +-
 .../hbase/metrics/TestBaseSourceImpl.html       |   133 +-
 .../TestProcedureMember.EmptySubprocedure.html  |   825 +-
 .../hbase/procedure/TestProcedureMember.html    |   825 +-
 .../TestYieldProcedures.TestProcEnv.html        |   479 +-
 ...TestStateMachineProcedure.ExecutionInfo.html |   479 +-
 ...cedures.TestStateMachineProcedure.State.html |   479 +-
 ...eldProcedures.TestStateMachineProcedure.html |   479 +-
 .../hbase/procedure2/TestYieldProcedures.html   |   479 +-
 .../store/TestProcedureStoreTracker.html        |   421 +-
 .../wal/TestWALProcedureStore.LoadCounter.html  |  1199 +-
 ...LProcedureStore.TestSequentialProcedure.html |  1199 +-
 .../store/wal/TestWALProcedureStore.html        |  1199 +-
 .../hadoop/hbase/quotas/TestQuotaState.html     |   401 +-
 .../hbase/regionserver/TestBlocksRead.html      |     2 +-
 .../regionserver/TestCompactSplitThread.html    |   155 +-
 ...tEndToEndSplitTransaction.RegionChecker.html |   937 +-
 ...EndToEndSplitTransaction.RegionSplitter.html |   937 +-
 .../TestEndToEndSplitTransaction.html           |   937 +-
 .../hbase/regionserver/TestQueryMatcher.html    |   630 +-
 ...rRetriableFailure.FaultyScannerObserver.html |   222 +-
 .../TestScannerRetriableFailure.html            |   222 +-
 .../hbase/regionserver/TestStoreFile.html       |  2025 ++-
 .../regionserver/wal/SequenceFileLogWriter.html |   371 +-
 ...stCustomWALCellCodec.CustomWALCellCodec.html |    83 +-
 .../wal/TestCustomWALCellCodec.html             |    83 +-
 .../regionserver/wal/TestLogRollPeriod.html     |   283 +-
 .../rest/model/TestNamespacesInstanceModel.html |   143 +-
 .../hbase/security/TestEncryptionUtil.html      |   209 +-
 .../TestAccessController.BulkLoadHelper.html    |  5346 +++---
 .../TestAccessController.PingCoprocessor.html   |  5346 +++---
 ...tAccessController.TestTableDDLProcedure.html |  5346 +++---
 .../security/access/TestAccessController.html   |  5346 +++---
 .../MobSnapshotTestingUtils.SnapshotMock.html   |   239 +-
 .../hbase/snapshot/MobSnapshotTestingUtils.html |   239 +-
 .../hbase/thrift/TestThriftHttpServer.html      |   323 +-
 ...ultiThreadedAction.DefaultDataGenerator.html |  1019 +-
 .../MultiThreadedAction.ProgressReporter.html   |  1019 +-
 .../hadoop/hbase/util/MultiThreadedAction.html  |  1019 +-
 ...UpdaterThreadWithACL.MutateAccessAction.html |   475 +-
 ...pdaterWithACL.HBaseUpdaterThreadWithACL.html |   475 +-
 .../hbase/util/MultiThreadedUpdaterWithACL.html |   475 +-
 .../hadoop/hbase/util/TestHBaseFsckTwoRS.html   |   949 +-
 .../util/hbck/OfflineMetaRebuildTestCore.html   |   489 +-
 .../hadoop/hbase/zookeeper/TestZKConfig.html    |   197 +-
 .../hadoop/hbase/zookeeper/TestZKUtil.html      |    63 +-
 xref-test/allclasses-frame.html                 |     3 +
 .../hadoop/hbase/CategoryBasedTimeout.html      |    95 +-
 .../org/apache/hadoop/hbase/TestNamespace.html  |   739 +-
 .../client/TestHTableMultiplexerFlushCache.html |   277 +-
 .../TestMobRestoreSnapshotFromClient.html       |   118 +-
 .../hadoop/hbase/client/TestMultiParallel.html  |  1381 +-
 .../hbase/coprocessor/TestMasterObserver.html   |  3659 ++--
 .../apache/hadoop/hbase/io/TestHFileLink.html   |   201 +-
 .../hbase/io/encoding/TestChangingEncoding.html |   439 +-
 .../io/encoding/TestDataBlockEncoders.html      |   741 +-
 .../encoding/TestSeekToBlockWithEncoders.html   |     4 +-
 .../hadoop/hbase/io/hfile/TestChecksum.html     |    12 +-
 .../io/hfile/TestForceCacheImportantBlocks.html |     6 +-
 .../io/hfile/TestHFileBackedByBucketCache.html  |   245 +
 .../hadoop/hbase/io/hfile/TestHFileBlock.html   |     2 +-
 .../io/hfile/TestHFileBlockCompatibility.html   |   803 +-
 .../hadoop/hbase/io/hfile/package-frame.html    |     3 +
 .../hadoop/hbase/io/hfile/package-summary.html  |     5 +
 .../hbase/mapred/TestTableInputFormat.html      |   800 +-
 .../mapreduce/IntegrationTestBulkLoad.html      |  1421 +-
 .../hbase/mapreduce/TestHFileOutputFormat2.html |  2116 ++-
 .../master/TestMetricsMasterSourceFactory.html  |    37 +-
 .../master/balancer/TestBaseLoadBalancer.html   |   946 +-
 .../procedure/TestMasterProcedureEvents.html    |   306 +-
 .../procedure/TestMasterProcedureScheduler.html |  1030 +-
 .../procedure/TestWALProcedureStoreOnHDFS.html  |   352 +-
 .../hbase/metrics/TestBaseSourceImpl.html       |   133 +-
 .../hbase/procedure/TestProcedureMember.html    |   825 +-
 .../hbase/procedure2/TestYieldProcedures.html   |   479 +-
 .../store/TestProcedureStoreTracker.html        |   421 +-
 .../store/wal/TestWALProcedureStore.html        |  1199 +-
 .../hadoop/hbase/quotas/TestQuotaState.html     |   401 +-
 .../hbase/regionserver/TestBlocksRead.html      |     2 +-
 .../regionserver/TestCompactSplitThread.html    |   155 +-
 .../TestEndToEndSplitTransaction.html           |   937 +-
 .../hbase/regionserver/TestQueryMatcher.html    |   630 +-
 .../TestScannerRetriableFailure.html            |   222 +-
 .../hbase/regionserver/TestStoreFile.html       |  2025 ++-
 .../regionserver/wal/SequenceFileLogWriter.html |   371 +-
 .../wal/TestCustomWALCellCodec.html             |    83 +-
 .../regionserver/wal/TestLogRollPeriod.html     |   283 +-
 .../rest/model/TestNamespacesInstanceModel.html |   143 +-
 .../hbase/security/TestEncryptionUtil.html      |   209 +-
 .../security/access/TestAccessController.html   |  5346 +++---
 .../hbase/snapshot/MobSnapshotTestingUtils.html |   239 +-
 .../hbase/thrift/TestThriftHttpServer.html      |   323 +-
 .../hadoop/hbase/util/MultiThreadedAction.html  |  1019 +-
 .../hbase/util/MultiThreadedUpdaterWithACL.html |   475 +-
 .../hadoop/hbase/util/TestHBaseFsckTwoRS.html   |   949 +-
 .../util/hbck/OfflineMetaRebuildTestCore.html   |   489 +-
 .../hadoop/hbase/zookeeper/TestZKConfig.html    |   197 +-
 .../hadoop/hbase/zookeeper/TestZKUtil.html      |    63 +-
 xref/org/apache/hadoop/hbase/HConstants.html    |  2356 +--
 .../codec/prefixtree/PrefixTreeSeeker.html      |  1113 +-
 .../io/encoding/BufferedDataBlockEncoder.html   |  2228 +--
 .../hbase/io/encoding/DataBlockEncoder.html     |   313 +-
 .../hadoop/hbase/io/hfile/CacheConfig.html      |  1184 +-
 .../org/apache/hadoop/hbase/io/hfile/HFile.html |  1467 +-
 .../hadoop/hbase/io/hfile/HFileBlock.html       |  3870 +++--
 .../hadoop/hbase/io/hfile/HFileContext.html     |    26 +-
 .../hadoop/hbase/io/hfile/HFileReaderImpl.html  |   815 +-
 .../hadoop/hbase/io/hfile/HFileScanner.html     |   275 +-
 .../hadoop/hbase/io/hfile/HFileWriterImpl.html  |  1403 +-
 .../hbase/io/hfile/bucket/BucketAllocator.html  |   571 +-
 .../hbase/io/hfile/bucket/BucketCache.html      |  2205 +--
 .../hbase/mapreduce/TableInputFormatBase.html   |  1245 +-
 .../hadoop/hbase/master/SplitLogManager.html    |  1535 +-
 .../hadoop/hbase/regionserver/HStore.html       |  1279 +-
 .../hbase/regionserver/RegionSplitPolicy.html   |   121 +-
 .../CompactionThroughputControllerFactory.html  |    94 +-
 .../security/access/SecureBulkLoadEndpoint.html |   338 +-
 .../hadoop/hbase/security/token/TokenUtil.html  |   517 +-
 .../hbase/tmpl/common/TaskMonitorTmpl.html      |    60 +-
 .../hbase/tmpl/common/TaskMonitorTmplImpl.html  |    16 +-
 .../hbase/tmpl/master/MasterStatusTmpl.html     |   240 +-
 .../hbase/tmpl/master/MasterStatusTmplImpl.html |    64 +-
 .../hbase/tmpl/regionserver/RSStatusTmpl.html   |   120 +-
 .../tmpl/regionserver/RSStatusTmplImpl.html     |    32 +-
 597 files changed, 142397 insertions(+), 140576 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/acid-semantics.html
----------------------------------------------------------------------
diff --git a/acid-semantics.html b/acid-semantics.html
index 01d14fd..c8dc0f1 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20160303" />
+    <meta name="Date-Revision-yyyymmdd" content="20160304" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) ACID Properties
@@ -600,7 +600,7 @@ under the License. -->
                         <a href="http://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2016-03-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2016-03-04</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/apache_hbase_reference_guide.pdf
----------------------------------------------------------------------
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 86027fb..7868f39 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20160303151707+00'00')
-/ModDate (D:20160303151707+00'00')
+/CreationDate (D:20160304151829+00'00')
+/ModDate (D:20160304151829+00'00')
 >>
 endobj
 2 0 obj

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/apache_hbase_reference_guide.pdfmarks
----------------------------------------------------------------------
diff --git a/apache_hbase_reference_guide.pdfmarks b/apache_hbase_reference_guide.pdfmarks
index 37f53fa..77eb105 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:20160303151856)
-  /CreationDate (D:20160303151856)
+  /ModDate (D:20160304152019)
+  /CreationDate (D:20160304152019)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark


[33/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
index be27b86..9e2d5bc 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html
@@ -100,7 +100,7 @@
 <li class="blockList">
 <dl>
 <dt>All Implemented Interfaces:</dt>
-<dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>, <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a></dd>
+<dd><a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true" title="class or interface in java.lang">AutoCloseable</a>, <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>, <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a></dd>
 </dl>
 <dl>
 <dt>Enclosing class:</dt>
@@ -108,7 +108,7 @@
 </dl>
 <hr>
 <br>
-<pre>protected static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1633">HFileReaderImpl.EncodedScanner</a>
+<pre>protected static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1636">HFileReaderImpl.EncodedScanner</a>
 extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileReaderImpl.HFileScannerImpl</a></pre>
 <div class="block">Scanner that operates on encoded data blocks.</div>
 </li>
@@ -298,7 +298,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>decodingCtx</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1634">decodingCtx</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1637">decodingCtx</a></pre>
 </li>
 </ul>
 <a name="seeker">
@@ -307,7 +307,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>seeker</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html" title="interface in org.apache.hadoop.hbase.io.encoding">DataBlockEncoder.EncodedSeeker</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1635">seeker</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html" title="interface in org.apache.hadoop.hbase.io.encoding">DataBlockEncoder.EncodedSeeker</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1638">seeker</a></pre>
 </li>
 </ul>
 <a name="dataBlockEncoder">
@@ -316,7 +316,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockListLast">
 <li class="blockList">
 <h4>dataBlockEncoder</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.encoding">DataBlockEncoder</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1636">dataBlockEncoder</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.encoding">DataBlockEncoder</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1639">dataBlockEncoder</a></pre>
 </li>
 </ul>
 </li>
@@ -333,7 +333,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFileReaderImpl.EncodedScanner</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1638">HFileReaderImpl.EncodedScanner</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;reader,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1641">HFileReaderImpl.EncodedScanner</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;reader,
                               boolean&nbsp;cacheBlocks,
                               boolean&nbsp;pread,
                               boolean&nbsp;isCompaction,
@@ -354,7 +354,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>isSeeked</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1649">isSeeked</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1652">isSeeked</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#isSeeked()">isSeeked</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a></code></dd>
@@ -371,7 +371,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>setNonSeekedState</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1653">setNonSeekedState</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1656">setNonSeekedState</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html#setNonSeekedState()">setNonSeekedState</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileReaderImpl.HFileScannerImpl</a></code></dd>
@@ -384,7 +384,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>updateCurrentBlock</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1665">updateCurrentBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;newBlock)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1668">updateCurrentBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;newBlock)
                            throws <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CorruptHFileException.html" title="class in org.apache.hadoop.hbase.io.hfile">CorruptHFileException</a></pre>
 <div class="block">Updates the current block to be the given <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock</code></a>. Seeks to
  the the first key/value pair.</div>
@@ -402,7 +402,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>getEncodedBuffer</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1687">getEncodedBuffer</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;newBlock)</pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/nio/ByteBuff.html" title="class in org.apache.hadoop.hbase.nio">ByteBuff</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1690">getEncodedBuffer</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;newBlock)</pre>
 </li>
 </ul>
 <a name="processFirstDataBlock()">
@@ -411,7 +411,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>processFirstDataBlock</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1697">processFirstDataBlock</a>()
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1700">processFirstDataBlock</a>()
                                  throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
@@ -426,7 +426,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>next</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1703">next</a>()
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1706">next</a>()
              throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from class:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html#next()">HFileReaderImpl.HFileScannerImpl</a></code></strong></div>
 <div class="block">Go to the next key/value in the block section. Loads the next block if
@@ -448,7 +448,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>getKey</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1718">getKey</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1721">getKey</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#getKey()">HFileScanner</a></code></strong></div>
 <div class="block">Gets the current key in the form of a cell. You must call
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo(org.apache.hadoop.hbase.Cell)"><code>HFileScanner.seekTo(Cell)</code></a> before this method.</div>
@@ -466,7 +466,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>getValue</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1724">getValue</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1727">getValue</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#getValue()">HFileScanner</a></code></strong></div>
 <div class="block">Gets a buffer view to the current value.  You must call
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo(org.apache.hadoop.hbase.Cell)"><code>HFileScanner.seekTo(Cell)</code></a> before this method.</div>
@@ -485,7 +485,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>getCell</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1730">getCell</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1733">getCell</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#getCell()">getCell</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a></code></dd>
@@ -500,7 +500,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>getKeyString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1738">getKeyString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1741">getKeyString</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#getKeyString()">HFileScanner</a></code></strong></div>
 <div class="block">Convenience method to get a copy of the key as a string - interpreting the
  bytes as UTF8. You must call <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo(org.apache.hadoop.hbase.Cell)"><code>HFileScanner.seekTo(Cell)</code></a> before this method.</div>
@@ -518,7 +518,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1743">getValueString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1746">getValueString</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#getValueString()">HFileScanner</a></code></strong></div>
 <div class="block">Convenience method to get a copy of the value as a string - interpreting
  the bytes as UTF8. You must call <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo(org.apache.hadoop.hbase.Cell)"><code>HFileScanner.seekTo(Cell)</code></a> before this method.</div>
@@ -536,7 +536,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>assertValidSeek</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1748">assertValidSeek</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1751">assertValidSeek</a>()</pre>
 </li>
 </ul>
 <a name="getFirstKeyCellInBlock(org.apache.hadoop.hbase.io.hfile.HFileBlock)">
@@ -545,7 +545,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>getFirstKeyCellInBlock</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1754">getFirstKeyCellInBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;curBlock)</pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1757">getFirstKeyCellInBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;curBlock)</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html#getFirstKeyCellInBlock(org.apache.hadoop.hbase.io.hfile.HFileBlock)">getFirstKeyCellInBlock</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileReaderImpl.HFileScannerImpl</a></code></dd>
@@ -558,7 +558,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockList">
 <li class="blockList">
 <h4>loadBlockAndSeekToKey</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1759">loadBlockAndSeekToKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;seekToBlock,
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1762">loadBlockAndSeekToKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;seekToBlock,
                         <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;nextIndexedKey,
                         boolean&nbsp;rewind,
                         <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key,
@@ -577,7 +577,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileReaderI
 <ul class="blockListLast">
 <li class="blockList">
 <h4>compareKey</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1771">compareKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.EncodedScanner.html#line.1774">compareKey</a>(<a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="class in org.apache.hadoop.hbase">CellComparator</a>&nbsp;comparator,
              <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;key)</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
index c305776..bdc12c8 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.HFileScannerImpl.html
@@ -95,7 +95,7 @@
 <li class="blockList">
 <dl>
 <dt>All Implemented Interfaces:</dt>
-<dd><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>, <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a></dd>
+<dd><a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true" title="class or interface in java.lang">AutoCloseable</a>, <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>, <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a></dd>
 </dl>
 <dl>
 <dt>Direct Known Subclasses:</dt>
@@ -789,6 +789,10 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScan
 <div class="block">Close this HFile scanner and do necessary cleanup.</div>
 <dl>
 <dt><strong>Specified by:</strong></dt>
+<dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true#close()" title="class or interface in java.io">close</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a></code></dd>
+<dt><strong>Specified by:</strong></dt>
+<dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true#close()" title="class or interface in java.lang">close</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true" title="class or interface in java.lang">AutoCloseable</a></code></dd>
+<dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#close()">close</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a></code></dd>
 </dl>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
index 856561e..19c4c44 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html
@@ -1395,7 +1395,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>hasMVCCInfo</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1554">hasMVCCInfo</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1557">hasMVCCInfo</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#hasMVCCInfo()">hasMVCCInfo</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a></code></dd>
@@ -1408,7 +1408,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>validateBlockType</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1567">validateBlockType</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;block,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1570">validateBlockType</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;block,
                      <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;expectedBlockType)
                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Compares the actual type of a block retrieved from cache or disk with its
@@ -1427,7 +1427,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastKey</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1590">getLastKey</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1593">getLastKey</a>()</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#getLastKey()">getLastKey</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a></code></dd>
@@ -1442,7 +1442,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>midkey</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1600">midkey</a>()
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1603">midkey</a>()
             throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
@@ -1459,7 +1459,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1605">close</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1608">close</a>()
            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
@@ -1476,7 +1476,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1609">close</a>(boolean&nbsp;evictOnClose)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1612">close</a>(boolean&nbsp;evictOnClose)
            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#close(boolean)">HFile.Reader</a></code></strong></div>
 <div class="block">Close method with optional evictOnClose</div>
@@ -1493,7 +1493,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>getEffectiveEncodingInCache</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1621">getEffectiveEncodingInCache</a>(boolean&nbsp;isCompaction)</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1624">getEffectiveEncodingInCache</a>(boolean&nbsp;isCompaction)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#getEffectiveEncodingInCache(boolean)">getEffectiveEncodingInCache</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a></code></dd>
@@ -1506,7 +1506,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>getUncachedBlockReader</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1626">getUncachedBlockReader</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1629">getUncachedBlockReader</a>()</pre>
 <div class="block">For testing</div>
 <dl>
 <dt><strong>Specified by:</strong></dt>
@@ -1520,7 +1520,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>getGeneralBloomFilterMetadata</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1781">getGeneralBloomFilterMetadata</a>()
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1784">getGeneralBloomFilterMetadata</a>()
                                         throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns a buffer with the Bloom filter metadata. The caller takes
  ownership of the buffer.</div>
@@ -1537,7 +1537,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>getDeleteBloomFilterMetadata</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1786">getDeleteBloomFilterMetadata</a>()
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1789">getDeleteBloomFilterMetadata</a>()
                                        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#getDeleteBloomFilterMetadata()">HFile.Reader</a></code></strong></div>
 <div class="block">Retrieves delete family Bloom filter metadata as appropriate for each
@@ -1556,7 +1556,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>getBloomFilterMetadata</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1790">getBloomFilterMetadata</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1793">getBloomFilterMetadata</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)
                                   throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
@@ -1568,7 +1568,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>isFileInfoLoaded</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1804">isFileInfoLoaded</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1807">isFileInfoLoaded</a>()</pre>
 </li>
 </ul>
 <a name="getFileContext()">
@@ -1577,7 +1577,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileContext</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1809">getFileContext</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1812">getFileContext</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#getFileContext()">HFile.Reader</a></code></strong></div>
 <div class="block">Return the file context of the HFile this reader belongs to</div>
 <dl>
@@ -1592,7 +1592,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>prefetchComplete</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1818">prefetchComplete</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1821">prefetchComplete</a>()</pre>
 <div class="block">Returns false if block prefetching was requested for this file and has
  not completed, true otherwise</div>
 <dl>
@@ -1607,7 +1607,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>createHFileContext</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1822">createHFileContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a>&nbsp;fsdis,
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1825">createHFileContext</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a>&nbsp;fsdis,
                               long&nbsp;fileSize,
                               <a href="../../../../../../org/apache/hadoop/hbase/fs/HFileSystem.html" title="class in org.apache.hadoop.hbase.fs">HFileSystem</a>&nbsp;hfs,
                               org.apache.hadoop.fs.Path&nbsp;path,
@@ -1623,7 +1623,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanner</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1871">getScanner</a>(boolean&nbsp;cacheBlocks,
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1874">getScanner</a>(boolean&nbsp;cacheBlocks,
                       boolean&nbsp;pread)</pre>
 <div class="block">Create a Scanner on this file. No seeks or reads are done on creation. Call
 <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo(org.apache.hadoop.hbase.Cell)"><code>HFileScanner.seekTo(Cell)</code></a> to position and start the read. There is
@@ -1644,7 +1644,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanner</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1890">getScanner</a>(boolean&nbsp;cacheBlocks,
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1893">getScanner</a>(boolean&nbsp;cacheBlocks,
                       boolean&nbsp;pread,
                       boolean&nbsp;isCompaction)</pre>
 <div class="block">Create a Scanner on this file. No seeks or reads are done on creation. Call
@@ -1665,7 +1665,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Rea
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMajorVersion</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1898">getMajorVersion</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.html#line.1901">getMajorVersion</a>()</pre>
 </li>
 </ul>
 </li>
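
The HFileReaderImpl hunks above only shift the source-line anchors; the getScanner contract itself is unchanged. A minimal sketch of how the two overloads documented above are typically driven, assuming an already-open HFile.Reader ("reader") and a seek key ("startCell"), both hypothetical:

    import java.io.IOException;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.io.hfile.HFileScanner;

    // Minimal sketch: "reader" and "startCell" are assumed inputs, not taken from this commit.
    static Cell readAt(HFile.Reader reader, Cell startCell) throws IOException {
      // cacheBlocks=false, pread=true; the three-argument overload adds an isCompaction flag.
      HFileScanner scanner = reader.getScanner(false, true);
      try {
        // seekTo(Cell) returns 0 on an exact match; the other return codes are described above.
        if (scanner.seekTo(startCell) == 0) {
          return scanner.getCell();
        }
        return null;
      } finally {
        scanner.close();
      }
    }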

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
index 0ce70b4..1b3e99a 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileScanner.html
@@ -87,7 +87,7 @@
 <li class="blockList">
 <dl>
 <dt>All Superinterfaces:</dt>
-<dd><a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a></dd>
+<dd><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true" title="class or interface in java.lang">AutoCloseable</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a></dd>
 </dl>
 <dl>
 <dt>All Known Implementing Classes:</dt>
@@ -96,8 +96,8 @@
 <hr>
 <br>
 <pre><a href="../../../../../../org/apache/hadoop/hbase/classification/InterfaceAudience.Private.html" title="annotation in org.apache.hadoop.hbase.classification">@InterfaceAudience.Private</a>
-public interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.41">HFileScanner</a>
-extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a></pre>
+public interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.42">HFileScanner</a>
+extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html" title="interface in org.apache.hadoop.hbase.regionserver">Shipper</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a></pre>
 <div class="block">A scanner allows you to position yourself within a HFile and
  scan through it.  It allows you to reposition yourself as well.
 
@@ -233,7 +233,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>seekTo</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.58">seekTo</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.59">seekTo</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
            throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">SeekTo or just before the passed <code>cell</code>.  Examine the return
  code to figure whether we found the cell or not.
@@ -258,7 +258,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>reseekTo</h4>
-<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.80">reseekTo</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
+<pre>int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.81">reseekTo</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
              throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Reseek to or just before the passed <code>cell</code>. Similar to seekTo
  except that this can be called even if the scanner is not at the beginning
@@ -287,7 +287,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>seekBefore</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.92">seekBefore</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.93">seekBefore</a>(<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell)
                    throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Consider the cell stream of all the cells in the file,
  <code>c[0] .. c[n]</code>, where there are n cells in the file.</div>
@@ -306,7 +306,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>seekTo</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.100">seekTo</a>()
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.101">seekTo</a>()
                throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Positions this scanner at the start of the file.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>False if empty file; i.e. a call to next would return false and
@@ -321,7 +321,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>next</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.106">next</a>()
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.107">next</a>()
              throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Scans to the next entry in the file.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>Returns false if you are at the end otherwise true if more in file.</dd>
@@ -335,7 +335,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>getKey</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.112">getKey</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.113">getKey</a>()</pre>
 <div class="block">Gets the current key in the form of a cell. You must call
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo(org.apache.hadoop.hbase.Cell)"><code>seekTo(Cell)</code></a> before this method.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>gets the current key as a Cell.</dd></dl>
@@ -347,7 +347,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>getValue</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.120">getValue</a>()</pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.121">getValue</a>()</pre>
 <div class="block">Gets a buffer view to the current value.  You must call
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo(org.apache.hadoop.hbase.Cell)"><code>seekTo(Cell)</code></a> before this method.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>byte buffer for the value. The limit is set to the value size, and
@@ -360,7 +360,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>getCell</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.124">getCell</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.125">getCell</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>Instance of <a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase"><code>Cell</code></a>.</dd></dl>
 </li>
 </ul>
@@ -370,7 +370,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>getKeyString</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.130">getKeyString</a>()</pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.131">getKeyString</a>()</pre>
 <div class="block">Convenience method to get a copy of the key as a string - interpreting the
  bytes as UTF8. You must call <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo(org.apache.hadoop.hbase.Cell)"><code>seekTo(Cell)</code></a> before this method.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>key as a string</dd></dl>
@@ -382,7 +382,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>getValueString</h4>
-<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.136">getValueString</a>()</pre>
+<pre><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.137">getValueString</a>()</pre>
 <div class="block">Convenience method to get a copy of the value as a string - interpreting
  the bytes as UTF8. You must call <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo(org.apache.hadoop.hbase.Cell)"><code>seekTo(Cell)</code></a> before this method.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>value as a string</dd></dl>
@@ -394,7 +394,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>getReader</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.140">getReader</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.141">getReader</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>Reader that underlies this Scanner instance.</dd></dl>
 </li>
 </ul>
@@ -404,7 +404,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>isSeeked</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.146">isSeeked</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.147">isSeeked</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>True if scanner has had one of the seek calls invoked; i.e.
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekBefore(org.apache.hadoop.hbase.Cell)"><code>seekBefore(Cell)</code></a> or <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo()"><code>seekTo()</code></a> or <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html#seekTo(org.apache.hadoop.hbase.Cell)"><code>seekTo(Cell)</code></a>.
  Otherwise returns false.</dd></dl>
@@ -416,7 +416,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockList">
 <li class="blockList">
 <h4>getNextIndexedKey</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.151">getNextIndexedKey</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.152">getNextIndexedKey</a>()</pre>
 <dl><dt><span class="strong">Returns:</span></dt><dd>the next key in the index (the key to seek to the next block)</dd></dl>
 </li>
 </ul>
@@ -426,8 +426,14 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/regionserver/Shipper.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>close</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.156">close</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileScanner.html#line.157">close</a>()</pre>
 <div class="block">Close this HFile scanner and do necessary cleanup.</div>
+<dl>
+<dt><strong>Specified by:</strong></dt>
+<dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true#close()" title="class or interface in java.lang">close</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true" title="class or interface in java.lang">AutoCloseable</a></code></dd>
+<dt><strong>Specified by:</strong></dt>
+<dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true#close()" title="class or interface in java.io">close</a></code>&nbsp;in interface&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a></code></dd>
+</dl>
 </li>
 </ul>
 </li>
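
The substantive change in HFileScanner.html is the new Closeable/AutoCloseable superinterface, which makes close() eligible for try-with-resources. A minimal sketch of a full scan that relies on this, again assuming a hypothetical, already-open HFile.Reader:

    import java.io.IOException;
    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.io.hfile.HFileScanner;

    // Minimal sketch of a full scan; "reader" is an assumed input.
    static void scanAll(HFile.Reader reader) throws IOException {
      // HFileScanner is now Closeable, so try-with-resources handles the cleanup.
      try (HFileScanner scanner = reader.getScanner(false /* cacheBlocks */, true /* pread */)) {
        if (!scanner.seekTo()) {
          return;                                  // empty file: next() would also return false
        }
        do {
          System.out.println(scanner.getCell());   // valid only after a successful seek
        } while (scanner.next());
      }
    }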


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
index 80e7169..7918a41 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
@@ -232,11 +232,11 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>(package private) static <a href="../../../../../../org/apache/hadoop/hbase/util/Counter.html" title="class in org.apache.hadoop.hbase.util">Counter</a></code></td>
-<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html#checksumFailures">checksumFailures</a></strong></code>&nbsp;</td>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html#CHECKSUM_FAILURES">CHECKSUM_FAILURES</a></strong></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/util/Counter.html" title="class in org.apache.hadoop.hbase.util">Counter</a></code></td>
-<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html#dataBlockReadCnt">dataBlockReadCnt</a></strong></code>&nbsp;</td>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html#DATABLOCK_READ_COUNT">DATABLOCK_READ_COUNT</a></strong></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>static int</code></td>
@@ -573,22 +573,22 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFile.DEFAULT_BYTES_PER_CHECKSUM">Constant Field Values</a></dd></dl>
 </li>
 </ul>
-<a name="checksumFailures">
+<a name="CHECKSUM_FAILURES">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>checksumFailures</h4>
-<pre>static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/util/Counter.html" title="class in org.apache.hadoop.hbase.util">Counter</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.182">checksumFailures</a></pre>
+<h4>CHECKSUM_FAILURES</h4>
+<pre>static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/util/Counter.html" title="class in org.apache.hadoop.hbase.util">Counter</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.183">CHECKSUM_FAILURES</a></pre>
 </li>
 </ul>
-<a name="dataBlockReadCnt">
+<a name="DATABLOCK_READ_COUNT">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>dataBlockReadCnt</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/util/Counter.html" title="class in org.apache.hadoop.hbase.util">Counter</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.185">dataBlockReadCnt</a></pre>
+<h4>DATABLOCK_READ_COUNT</h4>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/util/Counter.html" title="class in org.apache.hadoop.hbase.util">Counter</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.186">DATABLOCK_READ_COUNT</a></pre>
 </li>
 </ul>
 <a name="FORMAT_VERSION_KEY">
@@ -597,7 +597,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FORMAT_VERSION_KEY</h4>
-<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.319">FORMAT_VERSION_KEY</a></pre>
+<pre>public static final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.320">FORMAT_VERSION_KEY</a></pre>
 <div class="block">The configuration key for HFile version to use for new files</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFile.FORMAT_VERSION_KEY">Constant Field Values</a></dd></dl>
 </li>
@@ -633,7 +633,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getChecksumFailuresCount</h4>
-<pre>public static final&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.191">getChecksumFailuresCount</a>()</pre>
+<pre>public static final&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.192">getChecksumFailuresCount</a>()</pre>
 <div class="block">Number of checksum verification failures. It also
  clears the counter.</div>
 </li>
@@ -644,7 +644,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getFormatVersion</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.321">getFormatVersion</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.322">getFormatVersion</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 <a name="getWriterFactoryNoCache(org.apache.hadoop.conf.Configuration)">
@@ -653,7 +653,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getWriterFactoryNoCache</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.332">getWriterFactoryNoCache</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.333">getWriterFactoryNoCache</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Returns the factory to be used to create <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> writers.
  Disables block cache access for all writers created through the
  returned factory.</div>
@@ -665,7 +665,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getWriterFactory</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.342">getWriterFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.343">getWriterFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                    <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
 <div class="block">Returns the factory to be used to create <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> writers</div>
 </li>
@@ -676,7 +676,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>pickReaderVersion</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.495">pickReaderVersion</a>(org.apache.hadoop.fs.Path&nbsp;path,
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.496">pickReaderVersion</a>(org.apache.hadoop.fs.Path&nbsp;path,
                              <a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a>&nbsp;fsdis,
                              long&nbsp;size,
                              <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf,
@@ -697,7 +697,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>createReader</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.532">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.533">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                         org.apache.hadoop.fs.Path&nbsp;path,
                         <a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a>&nbsp;fsdis,
                         long&nbsp;size,
@@ -716,7 +716,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>createReader</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.557">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.558">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                         org.apache.hadoop.fs.Path&nbsp;path,
                         <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf,
                         org.apache.hadoop.conf.Configuration&nbsp;conf)
@@ -733,7 +733,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>createReaderFromStream</h4>
-<pre>static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.568">createReaderFromStream</a>(org.apache.hadoop.fs.Path&nbsp;path,
+<pre>static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.569">createReaderFromStream</a>(org.apache.hadoop.fs.Path&nbsp;path,
                                   org.apache.hadoop.fs.FSDataInputStream&nbsp;fsdis,
                                   long&nbsp;size,
                                   <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf,
@@ -750,7 +750,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isHFileFormat</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.582">isHFileFormat</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.583">isHFileFormat</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                     org.apache.hadoop.fs.Path&nbsp;path)
                              throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns true if the specified file has a valid HFile Trailer.</div>
@@ -766,7 +766,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isHFileFormat</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.593">isHFileFormat</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.594">isHFileFormat</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                     org.apache.hadoop.fs.FileStatus&nbsp;fileStatus)
                              throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns true if the specified file has a valid HFile Trailer.</div>
@@ -782,7 +782,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>isReservedFileInfoKey</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.823">isReservedFileInfoKey</a>(byte[]&nbsp;key)</pre>
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.824">isReservedFileInfoKey</a>(byte[]&nbsp;key)</pre>
 <div class="block">Return true if the given file info key is reserved for internal use.</div>
 </li>
 </ul>
@@ -792,7 +792,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getSupportedCompressionAlgorithms</h4>
-<pre>public static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.839">getSupportedCompressionAlgorithms</a>()</pre>
+<pre>public static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.840">getSupportedCompressionAlgorithms</a>()</pre>
 <div class="block">Get names of supported compression algorithms. The names are acceptable by
  HFile.Writer.</div>
 <dl><dt><span class="strong">Returns:</span></dt><dd>Array of strings, each represents a supported compression
@@ -810,7 +810,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>longToInt</h4>
-<pre>static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.848">longToInt</a>(long&nbsp;l)</pre>
+<pre>static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.849">longToInt</a>(long&nbsp;l)</pre>
 </li>
 </ul>
 <a name="getStoreFiles(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path)">
@@ -819,7 +819,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreFiles</h4>
-<pre>static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.863">getStoreFiles</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>static&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.864">getStoreFiles</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                             org.apache.hadoop.fs.Path&nbsp;regionDir)
                                               throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns all HFiles belonging to the given region directory. Could return an
@@ -836,7 +836,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>checkFormatVersion</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.890">checkFormatVersion</a>(int&nbsp;version)
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.891">checkFormatVersion</a>(int&nbsp;version)
                                throws <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/IllegalArgumentException.html?is-external=true" title="class or interface in java.lang">IllegalArgumentException</a></pre>
 <div class="block">Checks the given <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> format version, and throws an exception if
  invalid. Note that if the version number comes from an input file and has
@@ -853,7 +853,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>checkHFileVersion</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.900">checkHFileVersion</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.901">checkHFileVersion</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
 </li>
 </ul>
 <a name="main(java.lang.String[])">
@@ -862,7 +862,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>main</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.911">main</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.912">main</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
                  throws <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></code></dd></dl>
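
Besides line shifts, the HFile.html diff above reflects the rename of two Counter fields to constant-style names (checksumFailures to CHECKSUM_FAILURES, dataBlockReadCnt to DATABLOCK_READ_COUNT). A minimal sketch of the static entry points documented above, assuming hypothetical fs, path and conf values that point at an existing HFile:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.HFile;

    // Minimal sketch; fs, path and conf are assumed inputs.
    static void inspect(FileSystem fs, Path path, Configuration conf) throws IOException {
      if (!HFile.isHFileFormat(fs, path)) {      // true only if the file has a valid HFile trailer
        return;
      }
      HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
      try {
        System.out.println(reader.getFileContext());   // HFileContext of the file just opened
      } finally {
        reader.close();
      }
      // Reads and resets the process-wide CHECKSUM_FAILURES counter.
      System.out.println("checksum failures: " + HFile.getChecksumFailuresCount());
    }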

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
index e3fc4b0..9eacc6f 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html
@@ -91,7 +91,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1261">HFileBlock.BlockIterator</a></pre>
+<pre>static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1301">HFileBlock.BlockIterator</a></pre>
 <div class="block">An interface allowing to iterate <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFileBlock</code></a>s.</div>
 </li>
 </ul>
@@ -145,7 +145,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>nextBlock</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#line.1266">nextBlock</a>()
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#line.1306">nextBlock</a>()
                      throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get the next block, or null if there are no more blocks to iterate.</div>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -158,7 +158,7 @@
 <ul class="blockListLast">
 <li class="blockList">
 <h4>nextBlockWithBlockType</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#line.1272">nextBlockWithBlockType</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#line.1312">nextBlockWithBlockType</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)
                                   throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Similar to <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html#nextBlock()"><code>nextBlock()</code></a> but checks block type, throws an
  exception if incorrect, and returns the HFile block</div>
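
HFileBlock.BlockIterator loses its public modifier here, so it is now usable only from within org.apache.hadoop.hbase.io.hfile. A minimal package-internal sketch of the iteration contract documented above (nextBlock() returns null once the range is exhausted); fsReader, startOffset, endOffset and handle() are assumed placeholders, and the surrounding method would have to declare IOException:

    // Package-internal sketch; the named variables are assumptions, not code from this commit.
    HFileBlock.BlockIterator it = fsReader.blockRange(startOffset, endOffset);
    for (HFileBlock block = it.nextBlock(); block != null; block = it.nextBlock()) {
      // nextBlockWithBlockType(BlockType.ROOT_INDEX) is the variant that also verifies
      // the block type and throws if it does not match.
      handle(block);   // "handle" is a placeholder for whatever consumes the block
    }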

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
index 98850fd..12f0ddc 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html
@@ -91,7 +91,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1244">HFileBlock.BlockWritable</a></pre>
+<pre>static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1284">HFileBlock.BlockWritable</a></pre>
 <div class="block">Something that can be written into a block.</div>
 </li>
 </ul>
@@ -144,7 +144,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockType</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html#line.1247">getBlockType</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html#line.1287">getBlockType</a>()</pre>
 <div class="block">The type of block this data should use.</div>
 </li>
 </ul>
@@ -154,7 +154,7 @@
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writeToBlock</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html#line.1255">writeToBlock</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutput.html?is-external=true" title="class or interface in java.io">DataOutput</a>&nbsp;out)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockWritable.html#line.1295">writeToBlock</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/io/DataOutput.html?is-external=true" title="class or interface in java.io">DataOutput</a>&nbsp;out)
                   throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Writes the block to the provided stream. Must not write any magic
  records.</div>
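
HFileBlock.BlockWritable becomes package-private as well. The interface is just the two methods shown above; a purely illustrative, package-internal sketch of an implementation, with the payload bytes being an assumption:

    // Illustrative only: writes a small fixed payload as a META block body; per the
    // writeToBlock contract above, no magic record is written here.
    static class ExamplePayload implements HFileBlock.BlockWritable {
      private final byte[] payload = "example".getBytes(java.nio.charset.StandardCharsets.UTF_8);

      @Override
      public BlockType getBlockType() {
        return BlockType.META;
      }

      @Override
      public void writeToBlock(java.io.DataOutput out) throws java.io.IOException {
        out.write(payload);
      }
    }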

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
index 28c7be6..61d6f98 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html
@@ -95,7 +95,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1276">HFileBlock.FSReader</a></pre>
+<pre>static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1316">HFileBlock.FSReader</a></pre>
 <div class="block">A full-fledged reader with iteration ability.</div>
 </li>
 </ul>
@@ -179,7 +179,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>readBlockData</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1289">readBlockData</a>(long&nbsp;offset,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1329">readBlockData</a>(long&nbsp;offset,
                        long&nbsp;onDiskSize,
                        int&nbsp;uncompressedSize,
                        boolean&nbsp;pread)
@@ -200,7 +200,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>blockRange</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockIterator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1301">blockRange</a>(long&nbsp;startOffset,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockIterator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1341">blockRange</a>(long&nbsp;startOffset,
                                   long&nbsp;endOffset)</pre>
 <div class="block">Creates a block iterator over the given portion of the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>.
  The iterator returns blocks starting with offset such that offset &lt;=
@@ -215,7 +215,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>closeStreams</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1304">closeStreams</a>()
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1344">closeStreams</a>()
                   throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Closes the backing streams</div>
 <dl><dt><span class="strong">Throws:</span></dt>
@@ -228,7 +228,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockDecodingContext</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1307">getBlockDecodingContext</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1347">getBlockDecodingContext</a>()</pre>
 <div class="block">Get a decoder for <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a> blocks from this file.</div>
 </li>
 </ul>
@@ -238,7 +238,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>getDefaultBlockDecodingContext</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1310">getDefaultBlockDecodingContext</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1350">getDefaultBlockDecodingContext</a>()</pre>
 <div class="block">Get the default decoder for blocks from this file.</div>
 </li>
 </ul>
@@ -248,7 +248,7 @@
 <ul class="blockList">
 <li class="blockList">
 <h4>setIncludesMemstoreTS</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1312">setIncludesMemstoreTS</a>(boolean&nbsp;includesMemstoreTS)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1352">setIncludesMemstoreTS</a>(boolean&nbsp;includesMemstoreTS)</pre>
 </li>
 </ul>
 <a name="setDataBlockEncoder(org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder)">
@@ -257,7 +257,7 @@
 <ul class="blockListLast">
 <li class="blockList">
 <h4>setDataBlockEncoder</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1313">setDataBlockEncoder</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;encoder)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#line.1353">setDataBlockEncoder</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;encoder)</pre>
 </li>
 </ul>
 </li>
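
HFileBlock.FSReader also drops public. A minimal package-internal sketch of a single positioned read using the readBlockData signature documented above; fsReader, offset and onDiskSize are assumed inputs (for example taken from a block index entry), and passing -1 for the uncompressed size is an assumption that it is not known up front:

    // Package-internal sketch; the named variables are assumptions for illustration.
    HFileBlock block = fsReader.readBlockData(offset, onDiskSize, -1, true /* pread */);
    try {
      // ... use the block ...
    } finally {
      fsReader.closeStreams();   // done with this reader: close the backing streams
    }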

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
index 5e375ba..3e71062 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html
@@ -103,7 +103,7 @@
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1327">HFileBlock.FSReaderImpl</a>
+<pre>static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1373">HFileBlock.FSReaderImpl</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a></pre>
 <div class="block">Reads version 2 blocks from the filesystem.</div>
@@ -332,7 +332,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>streamWrapper</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1330">streamWrapper</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1376">streamWrapper</a></pre>
 <div class="block">The file system stream of the underlying <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> that
  does or doesn't do checksum validations in the filesystem</div>
 </li>
@@ -343,7 +343,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>encodedBlockDecodingCtx</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1332">encodedBlockDecodingCtx</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1378">encodedBlockDecodingCtx</a></pre>
 </li>
 </ul>
 <a name="defaultDecodingCtx">
@@ -352,7 +352,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>defaultDecodingCtx</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1335">defaultDecodingCtx</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.html" title="class in org.apache.hadoop.hbase.io.encoding">HFileBlockDefaultDecodingContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1381">defaultDecodingCtx</a></pre>
 <div class="block">Default context used when BlockType != <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a>.</div>
 </li>
 </ul>
@@ -362,7 +362,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>prefetchedHeaderForThread</h4>
-<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/ThreadLocal.html?is-external=true" title="class or interface in java.lang">ThreadLocal</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.PrefetchedHeader</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1337">prefetchedHeaderForThread</a></pre>
+<pre>private&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/ThreadLocal.html?is-external=true" title="class or interface in java.lang">ThreadLocal</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock.PrefetchedHeader</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1383">prefetchedHeaderForThread</a></pre>
 </li>
 </ul>
 <a name="fileSize">
@@ -371,7 +371,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>fileSize</h4>
-<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1348">fileSize</a></pre>
+<pre>protected&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1394">fileSize</a></pre>
 <div class="block">The size of the file we are reading from, or -1 if unknown.</div>
 </li>
 </ul>
@@ -381,7 +381,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>hdrSize</h4>
-<pre>protected final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1351">hdrSize</a></pre>
+<pre>protected final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1397">hdrSize</a></pre>
 <div class="block">The size of the header</div>
 </li>
 </ul>
@@ -391,7 +391,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>hfs</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/fs/HFileSystem.html" title="class in org.apache.hadoop.hbase.fs">HFileSystem</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1354">hfs</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/fs/HFileSystem.html" title="class in org.apache.hadoop.hbase.fs">HFileSystem</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1400">hfs</a></pre>
 <div class="block">The filesystem used to access data</div>
 </li>
 </ul>
@@ -401,7 +401,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>streamLock</h4>
-<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/locks/Lock.html?is-external=true" title="class or interface in java.util.concurrent.locks">Lock</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1356">streamLock</a></pre>
+<pre>private final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/locks/Lock.html?is-external=true" title="class or interface in java.util.concurrent.locks">Lock</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1402">streamLock</a></pre>
 </li>
 </ul>
 <a name="DEFAULT_BUFFER_SIZE">
@@ -410,7 +410,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_BUFFER_SIZE</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1359">DEFAULT_BUFFER_SIZE</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1405">DEFAULT_BUFFER_SIZE</a></pre>
 <div class="block">The default buffer size for our buffered streams</div>
 <dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFileBlock.FSReaderImpl.DEFAULT_BUFFER_SIZE">Constant Field Values</a></dd></dl>
 </li>
@@ -421,7 +421,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>fileContext</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1361">fileContext</a></pre>
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1407">fileContext</a></pre>
 </li>
 </ul>
 <a name="pathName">
@@ -430,7 +430,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockListLast">
 <li class="blockList">
 <h4>pathName</h4>
-<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1363">pathName</a></pre>
+<pre>protected&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1409">pathName</a></pre>
 </li>
 </ul>
 </li>
@@ -447,12 +447,12 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>HFileBlock.FSReaderImpl</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1365">HFileBlock.FSReaderImpl</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a>&nbsp;stream,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1411">HFileBlock.FSReaderImpl</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a>&nbsp;stream,
                        long&nbsp;fileSize,
                        <a href="../../../../../../org/apache/hadoop/hbase/fs/HFileSystem.html" title="class in org.apache.hadoop.hbase.fs">HFileSystem</a>&nbsp;hfs,
                        org.apache.hadoop.fs.Path&nbsp;path,
                        <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext)
-                        throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+                  throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl><dt><span class="strong">Throws:</span></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd></dl>
 </li>
@@ -463,7 +463,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFileBlock.FSReaderImpl</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1386">HFileBlock.FSReaderImpl</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;istream,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1432">HFileBlock.FSReaderImpl</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;istream,
                        long&nbsp;fileSize,
                        <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;fileContext)
                   throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -487,7 +487,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>blockRange</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockIterator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1391">blockRange</a>(long&nbsp;startOffset,
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.BlockIterator.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.BlockIterator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1437">blockRange</a>(long&nbsp;startOffset,
                                   long&nbsp;endOffset)</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#blockRange(long,%20long)">HFileBlock.FSReader</a></code></strong></div>
 <div class="block">Creates a block iterator over the given portion of the <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>.
@@ -506,7 +506,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>readAtOffset</h4>
-<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1433">readAtOffset</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;istream,
+<pre>protected&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1479">readAtOffset</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;istream,
                byte[]&nbsp;dest,
                int&nbsp;destOffset,
                int&nbsp;size,
@@ -516,7 +516,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
                     throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Does a positional read or a seek and read into the given buffer. Returns
  the on-disk size of the next block, or -1 if it could not be determined.</div>
-<dl><dt><span class="strong">Parameters:</span></dt><dd><code>dest</code> - destination buffer</dd><dd><code>destOffset</code> - offset in the destination buffer</dd><dd><code>size</code> - size of the block to be read</dd><dd><code>peekIntoNextBlock</code> - whether to read the next block's on-disk size</dd><dd><code>fileOffset</code> - position in the stream to read at</dd><dd><code>pread</code> - whether we should do a positional read</dd><dd><code>istream</code> - The input source of data</dd>
+<dl><dt><span class="strong">Parameters:</span></dt><dd><code>dest</code> - destination buffer</dd><dd><code>destOffset</code> - offset into the destination buffer at where to put the bytes we read</dd><dd><code>size</code> - size of read</dd><dd><code>peekIntoNextBlock</code> - whether to read the next block's on-disk size</dd><dd><code>fileOffset</code> - position in the stream to read at</dd><dd><code>pread</code> - whether we should do a positional read</dd><dd><code>istream</code> - The input source of data</dd>
 <dt><span class="strong">Returns:</span></dt><dd>the on-disk size of the next block with header size included, or
          -1 if it could not be determined</dd>
 <dt><span class="strong">Throws:</span></dt>
@@ -529,7 +529,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>readBlockData</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1494">readBlockData</a>(long&nbsp;offset,
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1538">readBlockData</a>(long&nbsp;offset,
                        long&nbsp;onDiskSizeWithHeaderL,
                        int&nbsp;uncompressedSize,
                        boolean&nbsp;pread)
@@ -553,7 +553,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>readBlockDataInternal</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1576">readBlockDataInternal</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;is,
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1620">readBlockDataInternal</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;is,
                                long&nbsp;offset,
                                long&nbsp;onDiskSizeWithHeaderL,
                                int&nbsp;uncompressedSize,
@@ -576,7 +576,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>setIncludesMemstoreTS</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1722">setIncludesMemstoreTS</a>(boolean&nbsp;includesMemstoreTS)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1780">setIncludesMemstoreTS</a>(boolean&nbsp;includesMemstoreTS)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#setIncludesMemstoreTS(boolean)">setIncludesMemstoreTS</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a></code></dd>
@@ -589,7 +589,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>setDataBlockEncoder</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1726">setDataBlockEncoder</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;encoder)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1785">setDataBlockEncoder</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;encoder)</pre>
 <dl>
 <dt><strong>Specified by:</strong></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#setDataBlockEncoder(org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder)">setDataBlockEncoder</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a></code></dd>
@@ -602,7 +602,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockDecodingContext</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1731">getBlockDecodingContext</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1790">getBlockDecodingContext</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#getBlockDecodingContext()">HFileBlock.FSReader</a></code></strong></div>
 <div class="block">Get a decoder for <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html#ENCODED_DATA"><code>BlockType.ENCODED_DATA</code></a> blocks from this file.</div>
 <dl>
@@ -617,7 +617,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>getDefaultBlockDecodingContext</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1736">getDefaultBlockDecodingContext</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.html" title="interface in org.apache.hadoop.hbase.io.encoding">HFileBlockDecodingContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1795">getDefaultBlockDecodingContext</a>()</pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#getDefaultBlockDecodingContext()">HFileBlock.FSReader</a></code></strong></div>
 <div class="block">Get the default decoder for blocks from this file.</div>
 <dl>
@@ -632,7 +632,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>validateBlockChecksum</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1746">validateBlockChecksum</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;block,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1805">validateBlockChecksum</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;block,
                             long&nbsp;offset,
                             byte[]&nbsp;data,
                             int&nbsp;hdrSize)
@@ -651,7 +651,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockList">
 <li class="blockList">
 <h4>closeStreams</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1753">closeStreams</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1812">closeStreams</a>()
                   throws <a href="http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><strong>Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html#closeStreams()">HFileBlock.FSReader</a></code></strong></div>
 <div class="block">Closes the backing streams</div>
@@ -668,7 +668,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBloc
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1758">toString</a>()</pre>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReaderImpl.html#line.1817">toString</a>()</pre>
 <dl>
 <dt><strong>Overrides:</strong></dt>
 <dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
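
For orientation, a minimal sketch of how the FSReader API documented above can be driven end to end. It assumes package access (the five-argument constructor is no longer public in this revision) and placeholder variables (streamWrapper, fileSize, hfs, path, fileContext, trailerOffset) that are not taken from this diff; only readBlockData, closeStreams and blockRange come from the method detail shown above.

  // Sketch only: walk every block between offset 0 and the load-on-open section.
  HFileBlock.FSReader reader =
      new HFileBlock.FSReaderImpl(streamWrapper, fileSize, hfs, path, fileContext);
  long offset = 0;
  while (offset < trailerOffset) {
    // -1, -1: on-disk and uncompressed sizes unknown, so the reader derives them
    // from the (possibly prefetched) block header.
    HFileBlock block = reader.readBlockData(offset, -1, -1, /* pread= */ false);
    offset += block.getOnDiskSizeWithHeader();  // advance past header + data + checksums
  }
  reader.closeStreams();

The same traversal can also be expressed through reader.blockRange(0, trailerOffset) and the BlockIterator it returns.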

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
index bf8460a..1f48a30 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html
@@ -63,13 +63,13 @@
 <li>Nested&nbsp;|&nbsp;</li>
 <li><a href="#field_summary">Field</a>&nbsp;|&nbsp;</li>
 <li><a href="#constructor_summary">Constr</a>&nbsp;|&nbsp;</li>
-<li><a href="#methods_inherited_from_class_java.lang.Object">Method</a></li>
+<li><a href="#method_summary">Method</a></li>
 </ul>
 <ul class="subNavList">
 <li>Detail:&nbsp;</li>
 <li><a href="#field_detail">Field</a>&nbsp;|&nbsp;</li>
 <li><a href="#constructor_detail">Constr</a>&nbsp;|&nbsp;</li>
-<li>Method</li>
+<li><a href="#method_detail">Method</a></li>
 </ul>
 </div>
 <a name="skip-navbar_top">
@@ -99,7 +99,7 @@
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1320">HFileBlock.PrefetchedHeader</a>
+<pre>private static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.html#line.1360">HFileBlock.PrefetchedHeader</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">We always prefetch the header of the next block, so that we know its
  on-disk size in advance and can read it in one operation.</div>
@@ -161,12 +161,23 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <!--   -->
 </a>
 <h3>Method Summary</h3>
+<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
+<caption><span>Methods</span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Method and Description</th>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td class="colLast"><code><strong><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#toString()">toString</a></strong>()</code>&nbsp;</td>
+</tr>
+</table>
 <ul class="blockList">
 <li class="blockList"><a name="methods_inherited_from_class_java.lang.Object">
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></h3>
-<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#clone()" title="class or interface in java.lang">clone</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#equals(java.lang.Object)" title="class or interface in java.lang">equals</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#finalize()" title="class or interface in java.lang">finalize</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#getClass()" title="class or interface in java.lang">getClass</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#hashCode()" title="class or interface in java.lang">hashCode</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#notify()" title="class or interface in java.lang">notify</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang
 /Object.html?is-external=true#notifyAll()" title="class or interface in java.lang">notifyAll</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait()" title="class or interface in java.lang">wait</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait(long)" title="class or interface in java.lang">wait</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait(long,%20int)" title="class or interface in java.lang">wait</a></code></li>
+<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#clone()" title="class or interface in java.lang">clone</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#equals(java.lang.Object)" title="class or interface in java.lang">equals</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#finalize()" title="class or interface in java.lang">finalize</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#getClass()" title="class or interface in java.lang">getClass</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#hashCode()" title="class or interface in java.lang">hashCode</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#notify()" title="class or interface in java.lang">notify</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang
 /Object.html?is-external=true#notifyAll()" title="class or interface in java.lang">notifyAll</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait()" title="class or interface in java.lang">wait</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait(long)" title="class or interface in java.lang">wait</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait(long,%20int)" title="class or interface in java.lang">wait</a></code></li>
 </ul>
 </li>
 </ul>
@@ -188,7 +199,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>offset</h4>
-<pre>long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1321">offset</a></pre>
+<pre>long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1361">offset</a></pre>
 </li>
 </ul>
 <a name="header">
@@ -197,7 +208,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockList">
 <li class="blockList">
 <h4>header</h4>
-<pre>byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1322">header</a></pre>
+<pre>byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1362">header</a></pre>
 </li>
 </ul>
 <a name="buf">
@@ -206,7 +217,7 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>buf</h4>
-<pre>final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1323">buf</a></pre>
+<pre>final&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1363">buf</a></pre>
 </li>
 </ul>
 </li>
@@ -223,7 +234,28 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFileBlock.PrefetchedHeader</h4>
-<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1320">HFileBlock.PrefetchedHeader</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1360">HFileBlock.PrefetchedHeader</a>()</pre>
+</li>
+</ul>
+</li>
+</ul>
+<!-- ============ METHOD DETAIL ========== -->
+<ul class="blockList">
+<li class="blockList"><a name="method_detail">
+<!--   -->
+</a>
+<h3>Method Detail</h3>
+<a name="toString()">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>toString</h4>
+<pre>public&nbsp;<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.PrefetchedHeader.html#line.1365">toString</a>()</pre>
+<dl>
+<dt><strong>Overrides:</strong></dt>
+<dd><code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
+</dl>
 </li>
 </ul>
 </li>
@@ -280,13 +312,13 @@ extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 <li>Nested&nbsp;|&nbsp;</li>
 <li><a href="#field_summary">Field</a>&nbsp;|&nbsp;</li>
 <li><a href="#constructor_summary">Constr</a>&nbsp;|&nbsp;</li>
-<li><a href="#methods_inherited_from_class_java.lang.Object">Method</a></li>
+<li><a href="#method_summary">Method</a></li>
 </ul>
 <ul class="subNavList">
 <li>Detail:&nbsp;</li>
 <li><a href="#field_detail">Field</a>&nbsp;|&nbsp;</li>
 <li><a href="#constructor_detail">Constr</a>&nbsp;|&nbsp;</li>
-<li>Method</li>
+<li><a href="#method_detail">Method</a></li>
 </ul>
 </div>
 <a name="skip-navbar_bottom">
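
The class comment above ("we always prefetch the header of the next block, so that we know its on-disk size in advance and can read it in one operation") boils down to the following simplified, hypothetical flow. The field names match the PrefetchedHeader members documented above; offset, nextBlockOffset, onDiskBlock, onDiskSizeWithHeader and hdrSize are placeholders, and the real logic in readBlockDataInternal differs in detail.

  // Before issuing IO, check whether the previous read already captured this header.
  HFileBlock.PrefetchedHeader cached = prefetchedHeaderForThread.get();
  byte[] headerBuf = (cached.offset == offset) ? cached.header : null;

  // ... read the block body (plus the following block's header) in a single IO ...

  // Remember the extra header so the next readBlockData() call can skip a seek.
  cached.offset = nextBlockOffset;
  System.arraycopy(onDiskBlock, onDiskSizeWithHeader, cached.header, 0, hdrSize);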

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
index fa592b2..e3bb0a4 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html
@@ -108,7 +108,7 @@
 </dl>
 <hr>
 <br>
-<pre>private static enum <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.787">HFileBlock.Writer.State</a>
+<pre>private static enum <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.html#line.823">HFileBlock.Writer.State</a>
 extends <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>&gt;</pre>
 </li>
 </ul>
@@ -199,7 +199,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>INIT</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.788">INIT</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.824">INIT</a></pre>
 </li>
 </ul>
 <a name="WRITING">
@@ -208,7 +208,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>WRITING</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.789">WRITING</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.825">WRITING</a></pre>
 </li>
 </ul>
 <a name="BLOCK_READY">
@@ -217,7 +217,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BLOCK_READY</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.790">BLOCK_READY</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.826">BLOCK_READY</a></pre>
 </li>
 </ul>
 </li>
@@ -234,7 +234,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.787">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.823">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -251,7 +251,7 @@ for (HFileBlock.Writer.State c : HFileBlock.Writer.State.values())
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.787">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile">HFileBlock.Writer.State</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html#line.823">valueOf</a>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 


[07/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
index 8fd15a0..da22771 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
@@ -186,741 +186,742 @@
 <span class="sourceLineNo">178</span>   * The number of bytes per checksum.<a name="line.178"></a>
 <span class="sourceLineNo">179</span>   */<a name="line.179"></a>
 <span class="sourceLineNo">180</span>  public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  // For measuring number of checksum failures<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  static final Counter checksumFailures = new Counter();<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  // for test purpose<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  public static final Counter dataBlockReadCnt = new Counter();<a name="line.185"></a>
-<span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /**<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * Number of checksum verification failures. It also<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * clears the counter.<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  public static final long getChecksumFailuresCount() {<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    long count = checksumFailures.get();<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    checksumFailures.set(0);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    return count;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /** API required to write an {@link HFile} */<a name="line.197"></a>
-<span class="sourceLineNo">198</span>  public interface Writer extends Closeable {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>    /** Add an element to the file info map. */<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>    void append(Cell cell) throws IOException;<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>    /** @return the path to this {@link HFile} */<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    Path getPath();<a name="line.208"></a>
-<span class="sourceLineNo">209</span><a name="line.209"></a>
-<span class="sourceLineNo">210</span>    /**<a name="line.210"></a>
-<span class="sourceLineNo">211</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.211"></a>
-<span class="sourceLineNo">212</span>     * a compound Bloom filter writer.<a name="line.212"></a>
-<span class="sourceLineNo">213</span>     */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    /**<a name="line.223"></a>
-<span class="sourceLineNo">224</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.224"></a>
-<span class="sourceLineNo">225</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.225"></a>
-<span class="sourceLineNo">226</span>     * in HFile version 1 and version 2.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>     */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>    /**<a name="line.230"></a>
-<span class="sourceLineNo">231</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.231"></a>
-<span class="sourceLineNo">232</span>     * HFile V2.<a name="line.232"></a>
-<span class="sourceLineNo">233</span>     */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>    /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>     * Return the file context for the HFile this writer belongs to<a name="line.237"></a>
-<span class="sourceLineNo">238</span>     */<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    HFileContext getFileContext();<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * we want to be able to swap writer implementations.<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  public static class WriterFactory {<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    protected final Configuration conf;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    protected final CacheConfig cacheConf;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    protected FileSystem fs;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    protected Path path;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    protected FSDataOutputStream ostream;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    protected CellComparator comparator = <a name="line.252"></a>
-<span class="sourceLineNo">253</span>        CellComparator.COMPARATOR;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    protected InetSocketAddress[] favoredNodes;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    private HFileContext fileContext;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    protected boolean shouldDropBehind = false;<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      this.conf = conf;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      this.cacheConf = cacheConf;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span><a name="line.262"></a>
-<span class="sourceLineNo">263</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      Preconditions.checkNotNull(fs);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(path);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      this.fs = fs;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      this.path = path;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return this;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      Preconditions.checkNotNull(ostream);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      this.ostream = ostream;<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return this;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      Preconditions.checkNotNull(comparator);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      this.comparator = comparator;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      return this;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    }<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      // Deliberately not checking for null here.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      this.favoredNodes = favoredNodes;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return this;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      this.fileContext = fileContext;<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      return this;<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>      return this;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>  // For measuring number of checksum failures<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  static final Counter CHECKSUM_FAILURES = new Counter();<a name="line.183"></a>
+<span class="sourceLineNo">184</span><a name="line.184"></a>
+<span class="sourceLineNo">185</span>  // For tests. Gets incremented when we read a block whether from HDFS or from Cache.<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  public static final Counter DATABLOCK_READ_COUNT = new Counter();<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /**<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * Number of checksum verification failures. It also<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   * clears the counter.<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   */<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public static final long getChecksumFailuresCount() {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    long count = CHECKSUM_FAILURES.get();<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    CHECKSUM_FAILURES.set(0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    return count;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>  /** API required to write an {@link HFile} */<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  public interface Writer extends Closeable {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>    /** Add an element to the file info map. */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>    void append(Cell cell) throws IOException;<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>    /** @return the path to this {@link HFile} */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    Path getPath();<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>    /**<a name="line.211"></a>
+<span class="sourceLineNo">212</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.212"></a>
+<span class="sourceLineNo">213</span>     * a compound Bloom filter writer.<a name="line.213"></a>
+<span class="sourceLineNo">214</span>     */<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.215"></a>
+<span class="sourceLineNo">216</span><a name="line.216"></a>
+<span class="sourceLineNo">217</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>    /**<a name="line.224"></a>
+<span class="sourceLineNo">225</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.225"></a>
+<span class="sourceLineNo">226</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.226"></a>
+<span class="sourceLineNo">227</span>     * in HFile version 1 and version 2.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>     */<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    /**<a name="line.231"></a>
+<span class="sourceLineNo">232</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.232"></a>
+<span class="sourceLineNo">233</span>     * HFile V2.<a name="line.233"></a>
+<span class="sourceLineNo">234</span>     */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    /**<a name="line.237"></a>
+<span class="sourceLineNo">238</span>     * Return the file context for the HFile this writer belongs to<a name="line.238"></a>
+<span class="sourceLineNo">239</span>     */<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    HFileContext getFileContext();<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /**<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * we want to be able to swap writer implementations.<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  public static class WriterFactory {<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    protected final Configuration conf;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    protected final CacheConfig cacheConf;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    protected FileSystem fs;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    protected Path path;<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    protected FSDataOutputStream ostream;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    protected CellComparator comparator = <a name="line.253"></a>
+<span class="sourceLineNo">254</span>        CellComparator.COMPARATOR;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    protected InetSocketAddress[] favoredNodes;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    private HFileContext fileContext;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    protected boolean shouldDropBehind = false;<a name="line.257"></a>
+<span class="sourceLineNo">258</span><a name="line.258"></a>
+<span class="sourceLineNo">259</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      this.conf = conf;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      this.cacheConf = cacheConf;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(fs);<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      Preconditions.checkNotNull(path);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      this.fs = fs;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      this.path = path;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      return this;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    }<a name="line.270"></a>
+<span class="sourceLineNo">271</span><a name="line.271"></a>
+<span class="sourceLineNo">272</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      Preconditions.checkNotNull(ostream);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>      this.ostream = ostream;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      return this;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    }<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      Preconditions.checkNotNull(comparator);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      this.comparator = comparator;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      return this;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      // Deliberately not checking for null here.<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      this.favoredNodes = favoredNodes;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      return this;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      this.fileContext = fileContext;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      return this;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      return this;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    }<a name="line.298"></a>
 <span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    public Writer create() throws IOException {<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.302"></a>
-<span class="sourceLineNo">303</span>            "filesystem/path or path");<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      if (path != null) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        try {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        } catch (UnsupportedOperationException uoe) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  }<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  /** The configuration key for HFile version to use for new files */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static int getFormatVersion(Configuration conf) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    checkFormatVersion(version);<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    return version;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  }<a name="line.325"></a>
-<span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  /**<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * Disables block cache access for all writers created through the<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * returned factory.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.332"></a>
-<span class="sourceLineNo">333</span>       conf) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Configuration tempConf = new Configuration(conf);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  }<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  /**<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      CacheConfig cacheConf) {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    int version = getFormatVersion(conf);<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    switch (version) {<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    case 2:<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.347"></a>
-<span class="sourceLineNo">348</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        "in hbase-site.xml)");<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    case 3:<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    default:<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.354"></a>
-<span class="sourceLineNo">355</span>          "format version " + version);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    }<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  /**<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * An abstraction used by the block index.<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  public interface CachingBlockReader {<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    /**<a name="line.365"></a>
-<span class="sourceLineNo">366</span>     * Read in a file block.<a name="line.366"></a>
-<span class="sourceLineNo">367</span>     * @param offset offset to read.<a name="line.367"></a>
-<span class="sourceLineNo">368</span>     * @param onDiskBlockSize size of the block<a name="line.368"></a>
-<span class="sourceLineNo">369</span>     * @param cacheBlock<a name="line.369"></a>
-<span class="sourceLineNo">370</span>     * @param pread<a name="line.370"></a>
-<span class="sourceLineNo">371</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.371"></a>
-<span class="sourceLineNo">372</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.372"></a>
-<span class="sourceLineNo">373</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.373"></a>
-<span class="sourceLineNo">374</span>     *  caching efficiency of encoded data blocks)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.375"></a>
-<span class="sourceLineNo">376</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.376"></a>
-<span class="sourceLineNo">377</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.377"></a>
-<span class="sourceLineNo">378</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>     * @return Block wrapped in a ByteBuffer.<a name="line.379"></a>
-<span class="sourceLineNo">380</span>     * @throws IOException<a name="line.380"></a>
-<span class="sourceLineNo">381</span>     */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.382"></a>
-<span class="sourceLineNo">383</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.385"></a>
-<span class="sourceLineNo">386</span>        throws IOException;<a name="line.386"></a>
-<span class="sourceLineNo">387</span><a name="line.387"></a>
-<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
-<span class="sourceLineNo">389</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.389"></a>
-<span class="sourceLineNo">390</span>     * @param block Block to be returned.<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     */<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    void returnBlock(HFileBlock block);<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  }<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    /**<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * write.<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     */<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    String getName();<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>    CellComparator getComparator();<a name="line.404"></a>
-<span class="sourceLineNo">405</span><a name="line.405"></a>
-<span class="sourceLineNo">406</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    Cell getLastKey();<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>    Cell midkey() throws IOException;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>    long length();<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>    long getEntries();<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>    Cell getFirstKey();<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    long indexSize();<a name="line.422"></a>
-<span class="sourceLineNo">423</span><a name="line.423"></a>
-<span class="sourceLineNo">424</span>    byte[] getFirstRowKey();<a name="line.424"></a>
-<span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>    byte[] getLastRowKey();<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    FixedFileTrailer getTrailer();<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>    /**<a name="line.436"></a>
-<span class="sourceLineNo">437</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.437"></a>
-<span class="sourceLineNo">438</span>     * {@link HFile} version.<a name="line.438"></a>
-<span class="sourceLineNo">439</span>     * Knows nothing about how that metadata is structured.<a name="line.439"></a>
-<span class="sourceLineNo">440</span>     */<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * {@link HFile}  version.<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * Knows nothing about how that metadata is structured.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     */<a name="line.447"></a>
-<span class="sourceLineNo">448</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.448"></a>
-<span class="sourceLineNo">449</span><a name="line.449"></a>
-<span class="sourceLineNo">450</span>    Path getPath();<a name="line.450"></a>
-<span class="sourceLineNo">451</span><a name="line.451"></a>
-<span class="sourceLineNo">452</span>    /** Close method with optional evictOnClose */<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    void close(boolean evictOnClose) throws IOException;<a name="line.453"></a>
-<span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.455"></a>
-<span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    boolean hasMVCCInfo();<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>     * Return the file context of the HFile this reader belongs to<a name="line.460"></a>
-<span class="sourceLineNo">461</span>     */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    HFileContext getFileContext();<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    <a name="line.463"></a>
-<span class="sourceLineNo">464</span>    boolean isPrimaryReplicaReader();<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    <a name="line.465"></a>
-<span class="sourceLineNo">466</span>    void setPrimaryReplicaReader(boolean isPrimaryReplicaReader);<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    boolean shouldIncludeMemstoreTS();<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>    boolean isDecodeMemstoreTS();<a name="line.470"></a>
-<span class="sourceLineNo">471</span><a name="line.471"></a>
-<span class="sourceLineNo">472</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>    @VisibleForTesting<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    @VisibleForTesting<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    boolean prefetchComplete();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>  }<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  /**<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * Method returns the reader given the specified arguments.<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   *<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * @param path hfile's path<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * @param fsdis stream of path's file<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * @param size max size of the trailer.<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * @param hfs<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * @return an appropriate instance of HFileReader<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   */<a name="line.492"></a>
-<span class="sourceLineNo">493</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      justification="Intentional")<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  private static Reader pickReaderVersion(Path path, FSDataInputStreamWrapper fsdis,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      long size, CacheConfig cacheConf, HFileSystem hfs, Configuration conf) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FixedFileTrailer trailer = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    try {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      switch (trailer.getMajorVersion()) {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      case 2:<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        LOG.debug("Opening HFile v2 with v3 reader");<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      case 3 :<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs, conf);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      default:<a name="line.508"></a>
-<span class="sourceLineNo">509</span>        throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (Throwable t) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      try {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        fsdis.close();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      } catch (Throwable t2) {<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper", t2);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    }<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  /**<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   * @param fs A file system<a name="line.522"></a>
-<span class="sourceLineNo">523</span>   * @param path Path to HFile<a name="line.523"></a>
-<span class="sourceLineNo">524</span>   * @param fsdis a stream of path's file<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * @param size max size of the trailer.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.526"></a>
-<span class="sourceLineNo">527</span>   * @param conf Configuration<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * @return A version specific Hfile Reader<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  @SuppressWarnings("resource")<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  public static Reader createReader(FileSystem fs, Path path,<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      FSDataInputStreamWrapper fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      throws IOException {<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    HFileSystem hfs = null;<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    // the filesystem.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    if (!(fs instanceof HFileSystem)) {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      hfs = new HFileSystem(fs);<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } else {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      hfs = (HFileSystem)fs;<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    return pickReaderVersion(path, fsdis, size, cacheConf, hfs, conf);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   *<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * @param fs filesystem<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * @param path Path to file to read<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * @param cacheConf This must not be null.  @see {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * @return an active Reader instance<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   */<a name="line.556"></a>
-<span class="sourceLineNo">557</span>  public static Reader createReader(<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      FileSystem fs, Path path, CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    return pickReaderVersion(path, stream, fs.getFileStatus(path).getLen(),<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      cacheConf, stream.getHfs(), conf);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>  }<a name="line.563"></a>
-<span class="sourceLineNo">564</span><a name="line.564"></a>
-<span class="sourceLineNo">565</span>  /**<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * This factory method is used only by unit tests<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  static Reader createReaderFromStream(Path path,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      FSDataInputStream fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    return pickReaderVersion(path, wrapper, size, cacheConf, null, conf);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  /**<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   * @param fs filesystem<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   * @param path Path to file to verify<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.579"></a>
-<span class="sourceLineNo">580</span>   * @throws IOException if failed to read from the underlying stream<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   */<a name="line.581"></a>
-<span class="sourceLineNo">582</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>  /**<a name="line.586"></a>
-<span class="sourceLineNo">587</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.587"></a>
-<span class="sourceLineNo">588</span>   * @param fs filesystem<a name="line.588"></a>
-<span class="sourceLineNo">589</span>   * @param fileStatus the file to verify<a name="line.589"></a>
-<span class="sourceLineNo">590</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * @throws IOException if failed to read from the underlying stream<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   */<a name="line.592"></a>
-<span class="sourceLineNo">593</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      throws IOException {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    final Path path = fileStatus.getPath();<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    final long size = fileStatus.getLen();<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    try {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.599"></a>
-<span class="sourceLineNo">600</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      return true;<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    } catch (IllegalArgumentException e) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      return false;<a name="line.604"></a>
-<span class="sourceLineNo">605</span>    } catch (IOException e) {<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      throw e;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } finally {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      try {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        fsdis.close();<a name="line.609"></a>
-<span class="sourceLineNo">610</span>      } catch (Throwable t) {<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper: " + path, t);<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      }<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>  }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>  /**<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;byte [], byte []&gt;(Bytes.BYTES_COMPARATOR);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    public FileInfo() {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>      super();<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    }<a name="line.633"></a>
-<span class="sourceLineNo">634</span><a name="line.634"></a>
-<span class="sourceLineNo">635</span>    /**<a name="line.635"></a>
-<span class="sourceLineNo">636</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.636"></a>
-<span class="sourceLineNo">637</span>     * key prefix.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>     *<a name="line.638"></a>
-<span class="sourceLineNo">639</span>     * @param k key to add<a name="line.639"></a>
-<span class="sourceLineNo">640</span>     * @param v value to add<a name="line.640"></a>
-<span class="sourceLineNo">641</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.641"></a>
-<span class="sourceLineNo">642</span>     *          with the reserved prefix<a name="line.642"></a>
-<span class="sourceLineNo">643</span>     * @return this file info object<a name="line.643"></a>
-<span class="sourceLineNo">644</span>     * @throws IOException if the key or value is invalid<a name="line.644"></a>
-<span class="sourceLineNo">645</span>     */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        final boolean checkPrefix) throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      if (k == null || v == null) {<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        throw new NullPointerException("Key nor value may be null");<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      }<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.651"></a>
-<span class="sourceLineNo">652</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.652"></a>
-<span class="sourceLineNo">653</span>            + " are reserved");<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      put(k, v);<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return this;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span><a name="line.658"></a>
-<span class="sourceLineNo">659</span>    public void clear() {<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      this.map.clear();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span><a name="line.662"></a>
-<span class="sourceLineNo">663</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>      return map.comparator();<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    }<a name="line.665"></a>
-<span class="sourceLineNo">666</span><a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public boolean containsKey(Object key) {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      return map.containsKey(key);<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    }<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    public boolean containsValue(Object value) {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      return map.containsValue(value);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    }<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      return map.entrySet();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    public boolean equals(Object o) {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      return map.equals(o);<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span><a name="line.682"></a>
-<span class="sourceLineNo">683</span>    public byte[] firstKey() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      return map.firstKey();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    }<a name="line.685"></a>
-<span class="sourceLineNo">686</span><a name="line.686"></a>
-<span class="sourceLineNo">687</span>    public byte[] get(Object key) {<a name="line.687"></a>
-<span class="sourceLineNo">688</span>      return map.get(key);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>    }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>    public int hashCode() {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      return map.hashCode();<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span><a name="line.694"></a>
-<span class="sourceLineNo">695</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>      return this.map.headMap(toKey);<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    }<a name="line.697"></a>
-<span class="sourceLineNo">698</span><a name="line.698"></a>
-<span class="sourceLineNo">699</span>    public boolean isEmpty() {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return map.isEmpty();<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      return map.keySet();<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>    public byte[] lastKey() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>      return map.lastKey();<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    }<a name="line.709"></a>
-<span class="sourceLineNo">710</span><a name="line.710"></a>
-<span class="sourceLineNo">711</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      return this.map.put(key, value);<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    }<a name="line.713"></a>
-<span class="sourceLineNo">714</span><a name="line.714"></a>
-<span class="sourceLineNo">715</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.map.putAll(m);<a name="line.716"></a>
-<span class="sourceLineNo">717</span>    }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public byte[] remove(Object key) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return this.map.remove(key);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    public int size() {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>      return map.size();<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    }<a name="line.725"></a>
-<span class="sourceLineNo">726</span><a name="line.726"></a>
-<span class="sourceLineNo">727</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return this.map.subMap(fromKey, toKey);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.731"></a>
-<span class="sourceLineNo">732</span>      return this.map.tailMap(fromKey);<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    }<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>    public Collection&lt;byte[]&gt; values() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      return map.values();<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>    /**<a name="line.739"></a>
-<span class="sourceLineNo">740</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.740"></a>
-<span class="sourceLineNo">741</span>     * We write it as a protobuf.<a name="line.741"></a>
-<span class="sourceLineNo">742</span>     * @param out<a name="line.742"></a>
-<span class="sourceLineNo">743</span>     * @throws IOException<a name="line.743"></a>
-<span class="sourceLineNo">744</span>     * @see #read(DataInputStream)<a name="line.744"></a>
-<span class="sourceLineNo">745</span>     */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    void write(final DataOutputStream out) throws IOException {<a name="line.746"></a>
-<span class="sourceLineNo">747</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.749"></a>
-<span class="sourceLineNo">750</span>        bbpBuilder.setFirst(ByteStringer.wrap(e.getKey()));<a name="line.750"></a>
-<span class="sourceLineNo">751</span>        bbpBuilder.setSecond(ByteStringer.wrap(e.getValue()));<a name="line.751"></a>
-<span class="sourceLineNo">752</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      }<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      builder.build().writeDelimitedTo(out);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    /**<a name="line.758"></a>
-<span class="sourceLineNo">759</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.759"></a>
-<span class="sourceLineNo">760</span>     * Can deserialize protobuf of old Writables format.<a name="line.760"></a>
-<span class="sourceLineNo">761</span>     * @param in<a name="line.761"></a>
-<span class="sourceLineNo">762</span>     * @throws IOException<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * @see #write(DataOutputStream)<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    void read(final DataInputStream in) throws IOException {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      byte [] pbuf = new byte[pblen];<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (in.markSupported()) in.mark(pblen);<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      int read = in.read(pbuf);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      } else {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>        if (in.markSupported()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>          in.reset();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>          parseWritable(in);<a name="line.777"></a>
-<span class="sourceLineNo">778</span>        } else {<a name="line.778"></a>
-<span class="sourceLineNo">779</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.779"></a>
-<span class="sourceLineNo">780</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.781"></a>
-<span class="sourceLineNo">782</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.782"></a>
-<span class="sourceLineNo">783</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.783"></a>
-<span class="sourceLineNo">784</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.784"></a>
-<span class="sourceLineNo">785</span>          parseWritable(new DataInputStream(sis));<a name="line.785"></a>
-<span class="sourceLineNo">786</span>        }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>      }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    }<a name="line.788"></a>
-<span class="sourceLineNo">789</span><a name="line.789"></a>
-<span class="sourceLineNo">790</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.790"></a>
-<span class="sourceLineNo">791</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.791"></a>
-<span class="sourceLineNo">792</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.792"></a>
-<span class="sourceLineNo">793</span>     * @throws IOException<a name="line.793"></a>
-<span class="sourceLineNo">794</span>     */<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      this.map.clear();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>      // Read the number of entries in the map<a name="line.798"></a>
-<span class="sourceLineNo">799</span>      int entries = in.readInt();<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      // Then read each key/value pair<a name="line.800"></a>
-<span class="sourceLineNo">801</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        byte [] key = Bytes.readByteArray(in);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        in.readByte();<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        byte [] value = Bytes.readByteArray(in);<a name="line.805"></a>
-<span class="sourceLineNo">806</span>        this.map.put(key, value);<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      }<a name="line.807"></a>
-<span class="sourceLineNo">808</span>    }<a name="line.808"></a>
-<span class="sourceLineNo">809</span><a name="line.809"></a>
-<span class="sourceLineNo">810</span>    /**<a name="line.810"></a>
-<span class="sourceLineNo">811</span>     * Fill our map with content of the pb we read off disk<a name="line.811"></a>
-<span class="sourceLineNo">812</span>     * @param fip protobuf message to read<a name="line.812"></a>
-<span class="sourceLineNo">813</span>     */<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      this.map.clear();<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    }<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.822"></a>
-<span class="sourceLineNo">823</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.824"></a>
-<span class="sourceLineNo">825</span>  }<a name="line.825"></a>
-<span class="sourceLineNo">826</span><a name="line.826"></a>
-<span class="sourceLineNo">827</span>  /**<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * HFile.Writer.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   *<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * @return Array of strings, each represents a supported compression<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   *         supported.<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   *         &lt;ul&gt;<a name="line.834"></a>
-<span class="sourceLineNo">835</span>   *         &lt;li&gt;"none" - No compression.<a name="line.835"></a>
-<span class="sourceLineNo">836</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.836"></a>
-<span class="sourceLineNo">837</span>   *         &lt;/ul&gt;<a name="line.837"></a>
-<span class="sourceLineNo">838</span>   */<a name="line.838"></a>
-<span class="sourceLineNo">839</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    return Compression.getSupportedAlgorithms();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
-<span class="sourceLineNo">842</span><a name="line.842"></a>
-<span class="sourceLineNo">843</span>  // Utility methods.<a name="line.843"></a>
-<span class="sourceLineNo">844</span>  /*<a name="line.844"></a>
-<span class="sourceLineNo">845</span>   * @param l Long to convert to an int.<a name="line.845"></a>
-<span class="sourceLineNo">846</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.846"></a>
-<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
-<span class="sourceLineNo">848</span>  static int longToInt(final long l) {<a name="line.848"></a>
-<span class="sourceLineNo">849</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.851"></a>
-<span class="sourceLineNo">852</span>  }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>  /**<a name="line.854"></a>
-<span class="sourceLineNo">855</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.855"></a>
-<span class="sourceLineNo">856</span>   * empty list.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>   *<a name="line.857"></a>
-<span class="sourceLineNo">858</span>   * @param fs  The file system reference.<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * @param regionDir  The region directory to scan.<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * @return The list of files found.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * @throws IOException When scanning the files fails.<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   */<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      throws IOException {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;Path&gt;();<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    for(FileStatus dir : familyDirs) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.869"></a>
-<span class="sourceLineNo">870</span>      for (FileStatus file : files) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        if (!file.isDirectory() &amp;&amp;<a name="line.871"></a>
-<span class="sourceLineNo">872</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          regionHFiles.add(file.getPath());<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>      }<a name="line.876"></a>
-<span class="sourceLineNo">877</span>    }<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    return regionHFiles;<a name="line.878"></a>
-<span class="sourceLineNo">879</span>  }<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>  /**<a name="line.881"></a>
-<span class="sourceLineNo">882</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.882"></a>
-<span class="sourceLineNo">883</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.883"></a>
-<span class="sourceLineNo">884</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.884"></a>
-<span class="sourceLineNo">885</span>   * indicate that this is not a software error, but corrupted input.<a name="line.885"></a>
-<span class="sourceLineNo">886</span>   *<a name="line.886"></a>
-<span class="sourceLineNo">887</span>   * @param version an HFile version<a name="line.887"></a>
-<span class="sourceLineNo">888</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.888"></a>
-<span class="sourceLineNo">889</span>   */<a name="line.889"></a>
-<span class="sourceLineNo">890</span>  public static void checkFormatVersion(int version)<a name="line.890"></a>
-<span class="sourceLineNo">891</span>      throws IllegalArgumentException {<a name="line.891"></a>
-<span class="sourceLineNo">892</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.894"></a>
-<span class="sourceLineNo">895</span>          + MAX_FORMAT_VERSION + ")");<a name="line.895"></a>
-<span class="sourceLineNo">896</span>    }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>  }<a name="line.897"></a>
-<span class="sourceLineNo">898</span><a name="line.898"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>    public Writer create() throws IOException {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.303"></a>
+<span class="sourceLineNo">304</span>            "filesystem/path or path");<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      if (path != null) {<a name="line.306"></a>
+<span class="sourceLineNo">307</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        try {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.309"></a>
+<span class="sourceLineNo">310</span>        } catch (UnsupportedOperationException uoe) {<a name="line.310"></a>
+<span class="sourceLineNo">311</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        }<a name="line.313"></a>
+<span class="sourceLineNo">314</span>      }<a name="line.314"></a>
+<span class="sourceLineNo">315</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    }<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  /** The configuration key for HFile version to use for new files */<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  public static int getFormatVersion(Configuration conf) {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    checkFormatVersion(version);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return version;<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * Disables block cache access for all writers created through the<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * returned factory.<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   */<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.333"></a>
+<span class="sourceLineNo">334</span>       conf) {<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    Configuration tempConf = new Configuration(conf);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  }<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  /**<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.343"></a>
+<span class="sourceLineNo">344</span>      CacheConfig cacheConf) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    int version = getFormatVersion(conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    switch (version) {<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    case 2:<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.350"></a>
+<span class="sourceLineNo">351</span>        "in hbase-site.xml)");<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    case 3:<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    default:<a name="line.354"></a>
+<span class="sourceLineNo">355</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.355"></a>
+<span class="sourceLineNo">356</span>          "format version " + version);<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  /**<a name="line.360"></a>
+<span class="sourceLineNo">361</span>   * An abstraction used by the block index.<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public interface CachingBlockReader {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>    /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>     * Read in a file block.<a name="line.367"></a>
+<span class="sourceLineNo">368</span>     * @param offset offset to read.<a name="line.368"></a>
+<span class="sourceLineNo">369</span>     * @param onDiskBlockSize size of the block<a name="line.369"></a>
+<span class="sourceLineNo">370</span>     * @param cacheBlock<a name="line.370"></a>
+<span class="sourceLineNo">371</span>     * @param pread<a name="line.371"></a>
+<span class="sourceLineNo">372</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.372"></a>
+<span class="sourceLineNo">373</span>     * @param expectedBlockType the 

<TRUNCATED>
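For context on the writer API touched by the hunks above and below: the short sketch that follows shows one way a caller might obtain an HFile.Writer through the WriterFactory builder (getWriterFactoryNoCache / withPath / withFileContext / create) and append a single cell. It is only an illustrative sketch, not code from this commit; the output path and cell contents are placeholders, and it assumes the stock HBaseConfiguration, KeyValue, and HFileContextBuilder helpers from the same codebase.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.io.hfile.HFileContext;
    import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    public class HFileWriteSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/tmp/example.hfile");            // placeholder output location
        HFileContext ctx = new HFileContextBuilder().build();  // default block size/compression (builder assumed)

        // getWriterFactoryNoCache() copies the conf and sets the block cache size to 0,
        // so blocks written here are not cached on write.
        HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
            .withPath(fs, path)
            .withFileContext(ctx)
            .create();
        try {
          // Cells must be appended in sorted order; a single KeyValue is enough for a sketch.
          Cell cell = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
              Bytes.toBytes("q"), Bytes.toBytes("value1"));
          writer.append(cell);
        } finally {
          writer.close();
        }
      }
    }

Note that create() insists on exactly one of withPath() or withOutputStream(); supplying both, or neither, trips the AssertionError shown in the new body of create() above.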

[06/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
index 8fd15a0..da22771 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
@@ -186,741 +186,742 @@
 <span class="sourceLineNo">178</span>   * The number of bytes per checksum.<a name="line.178"></a>
 <span class="sourceLineNo">179</span>   */<a name="line.179"></a>
 <span class="sourceLineNo">180</span>  public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  // For measuring number of checksum failures<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  static final Counter checksumFailures = new Counter();<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  // for test purpose<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  public static final Counter dataBlockReadCnt = new Counter();<a name="line.185"></a>
-<span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /**<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * Number of checksum verification failures. It also<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * clears the counter.<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   */<a name="line.190"></a>
-<span class="sourceLineNo">191</span>  public static final long getChecksumFailuresCount() {<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    long count = checksumFailures.get();<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    checksumFailures.set(0);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    return count;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /** API required to write an {@link HFile} */<a name="line.197"></a>
-<span class="sourceLineNo">198</span>  public interface Writer extends Closeable {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>    /** Add an element to the file info map. */<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>    void append(Cell cell) throws IOException;<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>    /** @return the path to this {@link HFile} */<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    Path getPath();<a name="line.208"></a>
-<span class="sourceLineNo">209</span><a name="line.209"></a>
-<span class="sourceLineNo">210</span>    /**<a name="line.210"></a>
-<span class="sourceLineNo">211</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.211"></a>
-<span class="sourceLineNo">212</span>     * a compound Bloom filter writer.<a name="line.212"></a>
-<span class="sourceLineNo">213</span>     */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    /**<a name="line.223"></a>
-<span class="sourceLineNo">224</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.224"></a>
-<span class="sourceLineNo">225</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.225"></a>
-<span class="sourceLineNo">226</span>     * in HFile version 1 and version 2.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>     */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>    /**<a name="line.230"></a>
-<span class="sourceLineNo">231</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.231"></a>
-<span class="sourceLineNo">232</span>     * HFile V2.<a name="line.232"></a>
-<span class="sourceLineNo">233</span>     */<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>    /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>     * Return the file context for the HFile this writer belongs to<a name="line.237"></a>
-<span class="sourceLineNo">238</span>     */<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    HFileContext getFileContext();<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * we want to be able to swap writer implementations.<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   */<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  public static class WriterFactory {<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    protected final Configuration conf;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    protected final CacheConfig cacheConf;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    protected FileSystem fs;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    protected Path path;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    protected FSDataOutputStream ostream;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    protected CellComparator comparator = <a name="line.252"></a>
-<span class="sourceLineNo">253</span>        CellComparator.COMPARATOR;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    protected InetSocketAddress[] favoredNodes;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    private HFileContext fileContext;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    protected boolean shouldDropBehind = false;<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      this.conf = conf;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      this.cacheConf = cacheConf;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span><a name="line.262"></a>
-<span class="sourceLineNo">263</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      Preconditions.checkNotNull(fs);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(path);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      this.fs = fs;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      this.path = path;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return this;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span><a name="line.270"></a>
-<span class="sourceLineNo">271</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      Preconditions.checkNotNull(ostream);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      this.ostream = ostream;<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      return this;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      Preconditions.checkNotNull(comparator);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      this.comparator = comparator;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      return this;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    }<a name="line.281"></a>
-<span class="sourceLineNo">282</span><a name="line.282"></a>
-<span class="sourceLineNo">283</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      // Deliberately not checking for null here.<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      this.favoredNodes = favoredNodes;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      return this;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      this.fileContext = fileContext;<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      return this;<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>      return this;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>  // For measuring number of checksum failures<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  static final Counter CHECKSUM_FAILURES = new Counter();<a name="line.183"></a>
+<span class="sourceLineNo">184</span><a name="line.184"></a>
+<span class="sourceLineNo">185</span>  // For tests. Gets incremented when we read a block whether from HDFS or from Cache.<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  public static final Counter DATABLOCK_READ_COUNT = new Counter();<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  /**<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * Number of checksum verification failures. It also<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   * clears the counter.<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   */<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public static final long getChecksumFailuresCount() {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    long count = CHECKSUM_FAILURES.get();<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    CHECKSUM_FAILURES.set(0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    return count;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>  /** API required to write an {@link HFile} */<a name="line.198"></a>
+<span class="sourceLineNo">199</span>  public interface Writer extends Closeable {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    /** Max memstore (mvcc) timestamp in FileInfo */<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    public static final byte [] MAX_MEMSTORE_TS_KEY = Bytes.toBytes("MAX_MEMSTORE_TS_KEY");<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>    /** Add an element to the file info map. */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>    void appendFileInfo(byte[] key, byte[] value) throws IOException;<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>    void append(Cell cell) throws IOException;<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>    /** @return the path to this {@link HFile} */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    Path getPath();<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>    /**<a name="line.211"></a>
+<span class="sourceLineNo">212</span>     * Adds an inline block writer such as a multi-level block index writer or<a name="line.212"></a>
+<span class="sourceLineNo">213</span>     * a compound Bloom filter writer.<a name="line.213"></a>
+<span class="sourceLineNo">214</span>     */<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    void addInlineBlockWriter(InlineBlockWriter bloomWriter);<a name="line.215"></a>
+<span class="sourceLineNo">216</span><a name="line.216"></a>
+<span class="sourceLineNo">217</span>    // The below three methods take Writables.  We'd like to undo Writables but undoing the below would be pretty<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    // painful.  Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    // to map between Message and Writable or byte [] and current Writable serialization.  This would be a bit of work<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    // to little gain.  Thats my thinking at moment.  St.Ack 20121129<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>    void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter);<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>    /**<a name="line.224"></a>
+<span class="sourceLineNo">225</span>     * Store general Bloom filter in the file. This does not deal with Bloom filter<a name="line.225"></a>
+<span class="sourceLineNo">226</span>     * internals but is necessary, since Bloom filters are stored differently<a name="line.226"></a>
+<span class="sourceLineNo">227</span>     * in HFile version 1 and version 2.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>     */<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    void addGeneralBloomFilter(BloomFilterWriter bfw);<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    /**<a name="line.231"></a>
+<span class="sourceLineNo">232</span>     * Store delete family Bloom filter in the file, which is only supported in<a name="line.232"></a>
+<span class="sourceLineNo">233</span>     * HFile V2.<a name="line.233"></a>
+<span class="sourceLineNo">234</span>     */<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    /**<a name="line.237"></a>
+<span class="sourceLineNo">238</span>     * Return the file context for the HFile this writer belongs to<a name="line.238"></a>
+<span class="sourceLineNo">239</span>     */<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    HFileContext getFileContext();<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>  /**<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * This variety of ways to construct writers is used throughout the code, and<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   * we want to be able to swap writer implementations.<a name="line.245"></a>
+<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  public static class WriterFactory {<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    protected final Configuration conf;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    protected final CacheConfig cacheConf;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    protected FileSystem fs;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    protected Path path;<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    protected FSDataOutputStream ostream;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    protected CellComparator comparator = <a name="line.253"></a>
+<span class="sourceLineNo">254</span>        CellComparator.COMPARATOR;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    protected InetSocketAddress[] favoredNodes;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    private HFileContext fileContext;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    protected boolean shouldDropBehind = false;<a name="line.257"></a>
+<span class="sourceLineNo">258</span><a name="line.258"></a>
+<span class="sourceLineNo">259</span>    WriterFactory(Configuration conf, CacheConfig cacheConf) {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      this.conf = conf;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      this.cacheConf = cacheConf;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    }<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>    public WriterFactory withPath(FileSystem fs, Path path) {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      Preconditions.checkNotNull(fs);<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      Preconditions.checkNotNull(path);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      this.fs = fs;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      this.path = path;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      return this;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    }<a name="line.270"></a>
+<span class="sourceLineNo">271</span><a name="line.271"></a>
+<span class="sourceLineNo">272</span>    public WriterFactory withOutputStream(FSDataOutputStream ostream) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      Preconditions.checkNotNull(ostream);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>      this.ostream = ostream;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      return this;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    }<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    public WriterFactory withComparator(CellComparator comparator) {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      Preconditions.checkNotNull(comparator);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      this.comparator = comparator;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      return this;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    public WriterFactory withFavoredNodes(InetSocketAddress[] favoredNodes) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      // Deliberately not checking for null here.<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      this.favoredNodes = favoredNodes;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      return this;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>    public WriterFactory withFileContext(HFileContext fileContext) {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      this.fileContext = fileContext;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      return this;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>    public WriterFactory withShouldDropCacheBehind(boolean shouldDropBehind) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      this.shouldDropBehind = shouldDropBehind;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      return this;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    }<a name="line.298"></a>
 <span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>    public Writer create() throws IOException {<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.302"></a>
-<span class="sourceLineNo">303</span>            "filesystem/path or path");<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      if (path != null) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        try {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        } catch (UnsupportedOperationException uoe) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  }<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  /** The configuration key for HFile version to use for new files */<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static int getFormatVersion(Configuration conf) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    checkFormatVersion(version);<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    return version;<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  }<a name="line.325"></a>
-<span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  /**<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * Disables block cache access for all writers created through the<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * returned factory.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.332"></a>
-<span class="sourceLineNo">333</span>       conf) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    Configuration tempConf = new Configuration(conf);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  }<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  /**<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      CacheConfig cacheConf) {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    int version = getFormatVersion(conf);<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    switch (version) {<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    case 2:<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.347"></a>
-<span class="sourceLineNo">348</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        "in hbase-site.xml)");<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    case 3:<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    default:<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.354"></a>
-<span class="sourceLineNo">355</span>          "format version " + version);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    }<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  /**<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * An abstraction used by the block index.<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   */<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  public interface CachingBlockReader {<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    /**<a name="line.365"></a>
-<span class="sourceLineNo">366</span>     * Read in a file block.<a name="line.366"></a>
-<span class="sourceLineNo">367</span>     * @param offset offset to read.<a name="line.367"></a>
-<span class="sourceLineNo">368</span>     * @param onDiskBlockSize size of the block<a name="line.368"></a>
-<span class="sourceLineNo">369</span>     * @param cacheBlock<a name="line.369"></a>
-<span class="sourceLineNo">370</span>     * @param pread<a name="line.370"></a>
-<span class="sourceLineNo">371</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.371"></a>
-<span class="sourceLineNo">372</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.372"></a>
-<span class="sourceLineNo">373</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.373"></a>
-<span class="sourceLineNo">374</span>     *  caching efficiency of encoded data blocks)<a name="line.374"></a>
-<span class="sourceLineNo">375</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.375"></a>
-<span class="sourceLineNo">376</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.376"></a>
-<span class="sourceLineNo">377</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.377"></a>
-<span class="sourceLineNo">378</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>     * @return Block wrapped in a ByteBuffer.<a name="line.379"></a>
-<span class="sourceLineNo">380</span>     * @throws IOException<a name="line.380"></a>
-<span class="sourceLineNo">381</span>     */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.382"></a>
-<span class="sourceLineNo">383</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.385"></a>
-<span class="sourceLineNo">386</span>        throws IOException;<a name="line.386"></a>
-<span class="sourceLineNo">387</span><a name="line.387"></a>
-<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
-<span class="sourceLineNo">389</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.389"></a>
-<span class="sourceLineNo">390</span>     * @param block Block to be returned.<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     */<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    void returnBlock(HFileBlock block);<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  }<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    /**<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * write.<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     */<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    String getName();<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>    CellComparator getComparator();<a name="line.404"></a>
-<span class="sourceLineNo">405</span><a name="line.405"></a>
-<span class="sourceLineNo">406</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.410"></a>
-<span class="sourceLineNo">411</span><a name="line.411"></a>
-<span class="sourceLineNo">412</span>    Cell getLastKey();<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>    Cell midkey() throws IOException;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>    long length();<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>    long getEntries();<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>    Cell getFirstKey();<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    long indexSize();<a name="line.422"></a>
-<span class="sourceLineNo">423</span><a name="line.423"></a>
-<span class="sourceLineNo">424</span>    byte[] getFirstRowKey();<a name="line.424"></a>
-<span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>    byte[] getLastRowKey();<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    FixedFileTrailer getTrailer();<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>    /**<a name="line.436"></a>
-<span class="sourceLineNo">437</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.437"></a>
-<span class="sourceLineNo">438</span>     * {@link HFile} version.<a name="line.438"></a>
-<span class="sourceLineNo">439</span>     * Knows nothing about how that metadata is structured.<a name="line.439"></a>
-<span class="sourceLineNo">440</span>     */<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    /**<a name="line.443"></a>
-<span class="sourceLineNo">444</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.444"></a>
-<span class="sourceLineNo">445</span>     * {@link HFile}  version.<a name="line.445"></a>
-<span class="sourceLineNo">446</span>     * Knows nothing about how that metadata is structured.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>     */<a name="line.447"></a>
-<span class="sourceLineNo">448</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.448"></a>
-<span class="sourceLineNo">449</span><a name="line.449"></a>
-<span class="sourceLineNo">450</span>    Path getPath();<a name="line.450"></a>
-<span class="sourceLineNo">451</span><a name="line.451"></a>
-<span class="sourceLineNo">452</span>    /** Close method with optional evictOnClose */<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    void close(boolean evictOnClose) throws IOException;<a name="line.453"></a>
-<span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.455"></a>
-<span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    boolean hasMVCCInfo();<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>     * Return the file context of the HFile this reader belongs to<a name="line.460"></a>
-<span class="sourceLineNo">461</span>     */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    HFileContext getFileContext();<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    <a name="line.463"></a>
-<span class="sourceLineNo">464</span>    boolean isPrimaryReplicaReader();<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    <a name="line.465"></a>
-<span class="sourceLineNo">466</span>    void setPrimaryReplicaReader(boolean isPrimaryReplicaReader);<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    boolean shouldIncludeMemstoreTS();<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>    boolean isDecodeMemstoreTS();<a name="line.470"></a>
-<span class="sourceLineNo">471</span><a name="line.471"></a>
-<span class="sourceLineNo">472</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>    @VisibleForTesting<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    @VisibleForTesting<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    boolean prefetchComplete();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>  }<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  /**<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * Method returns the reader given the specified arguments.<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   *<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * @param path hfile's path<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * @param fsdis stream of path's file<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * @param size max size of the trailer.<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * @param hfs<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * @return an appropriate instance of HFileReader<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   */<a name="line.492"></a>
-<span class="sourceLineNo">493</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      justification="Intentional")<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  private static Reader pickReaderVersion(Path path, FSDataInputStreamWrapper fsdis,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      long size, CacheConfig cacheConf, HFileSystem hfs, Configuration conf) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FixedFileTrailer trailer = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    try {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      switch (trailer.getMajorVersion()) {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      case 2:<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        LOG.debug("Opening HFile v2 with v3 reader");<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.505"></a>
-<span class="sourceLineNo">506</span>      case 3 :<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs, conf);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      default:<a name="line.508"></a>
-<span class="sourceLineNo">509</span>        throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (Throwable t) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      try {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        fsdis.close();<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      } catch (Throwable t2) {<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper", t2);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    }<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  /**<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   * @param fs A file system<a name="line.522"></a>
-<span class="sourceLineNo">523</span>   * @param path Path to HFile<a name="line.523"></a>
-<span class="sourceLineNo">524</span>   * @param fsdis a stream of path's file<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * @param size max size of the trailer.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.526"></a>
-<span class="sourceLineNo">527</span>   * @param conf Configuration<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * @return A version specific Hfile Reader<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  @SuppressWarnings("resource")<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  public static Reader createReader(FileSystem fs, Path path,<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      FSDataInputStreamWrapper fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      throws IOException {<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    HFileSystem hfs = null;<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    // the filesystem.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    if (!(fs instanceof HFileSystem)) {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      hfs = new HFileSystem(fs);<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } else {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      hfs = (HFileSystem)fs;<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    return pickReaderVersion(path, fsdis, size, cacheConf, hfs, conf);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   *<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   * @param fs filesystem<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * @param path Path to file to read<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * @param cacheConf This must not be null.  @see {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * @return an active Reader instance<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   */<a name="line.556"></a>
-<span class="sourceLineNo">557</span>  public static Reader createReader(<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      FileSystem fs, Path path, CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    return pickReaderVersion(path, stream, fs.getFileStatus(path).getLen(),<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      cacheConf, stream.getHfs(), conf);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>  }<a name="line.563"></a>
-<span class="sourceLineNo">564</span><a name="line.564"></a>
-<span class="sourceLineNo">565</span>  /**<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * This factory method is used only by unit tests<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  static Reader createReaderFromStream(Path path,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      FSDataInputStream fsdis, long size, CacheConfig cacheConf, Configuration conf)<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    return pickReaderVersion(path, wrapper, size, cacheConf, null, conf);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  /**<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   * @param fs filesystem<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   * @param path Path to file to verify<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.579"></a>
-<span class="sourceLineNo">580</span>   * @throws IOException if failed to read from the underlying stream<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   */<a name="line.581"></a>
-<span class="sourceLineNo">582</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>  /**<a name="line.586"></a>
-<span class="sourceLineNo">587</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.587"></a>
-<span class="sourceLineNo">588</span>   * @param fs filesystem<a name="line.588"></a>
-<span class="sourceLineNo">589</span>   * @param fileStatus the file to verify<a name="line.589"></a>
-<span class="sourceLineNo">590</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * @throws IOException if failed to read from the underlying stream<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   */<a name="line.592"></a>
-<span class="sourceLineNo">593</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      throws IOException {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    final Path path = fileStatus.getPath();<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    final long size = fileStatus.getLen();<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    try {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.599"></a>
-<span class="sourceLineNo">600</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      return true;<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    } catch (IllegalArgumentException e) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      return false;<a name="line.604"></a>
-<span class="sourceLineNo">605</span>    } catch (IOException e) {<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      throw e;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } finally {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      try {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        fsdis.close();<a name="line.609"></a>
-<span class="sourceLineNo">610</span>      } catch (Throwable t) {<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        LOG.warn("Error closing fsdis FSDataInputStreamWrapper: " + path, t);<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      }<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>  }<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>  /**<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;byte [], byte []&gt;(Bytes.BYTES_COMPARATOR);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>    public FileInfo() {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>      super();<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    }<a name="line.633"></a>
-<span class="sourceLineNo">634</span><a name="line.634"></a>
-<span class="sourceLineNo">635</span>    /**<a name="line.635"></a>
-<span class="sourceLineNo">636</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.636"></a>
-<span class="sourceLineNo">637</span>     * key prefix.<a name="line.637"></a>
-<span class="sourceLineNo">638</span>     *<a name="line.638"></a>
-<span class="sourceLineNo">639</span>     * @param k key to add<a name="line.639"></a>
-<span class="sourceLineNo">640</span>     * @param v value to add<a name="line.640"></a>
-<span class="sourceLineNo">641</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.641"></a>
-<span class="sourceLineNo">642</span>     *          with the reserved prefix<a name="line.642"></a>
-<span class="sourceLineNo">643</span>     * @return this file info object<a name="line.643"></a>
-<span class="sourceLineNo">644</span>     * @throws IOException if the key or value is invalid<a name="line.644"></a>
-<span class="sourceLineNo">645</span>     */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        final boolean checkPrefix) throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      if (k == null || v == null) {<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        throw new NullPointerException("Key nor value may be null");<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      }<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.651"></a>
-<span class="sourceLineNo">652</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.652"></a>
-<span class="sourceLineNo">653</span>            + " are reserved");<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      put(k, v);<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return this;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span><a name="line.658"></a>
-<span class="sourceLineNo">659</span>    public void clear() {<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      this.map.clear();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span><a name="line.662"></a>
-<span class="sourceLineNo">663</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>      return map.comparator();<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    }<a name="line.665"></a>
-<span class="sourceLineNo">666</span><a name="line.666"></a>
-<span class="sourceLineNo">667</span>    public boolean containsKey(Object key) {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      return map.containsKey(key);<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    }<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    public boolean containsValue(Object value) {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>      return map.containsValue(value);<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    }<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      return map.entrySet();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    public boolean equals(Object o) {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      return map.equals(o);<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span><a name="line.682"></a>
-<span class="sourceLineNo">683</span>    public byte[] firstKey() {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      return map.firstKey();<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    }<a name="line.685"></a>
-<span class="sourceLineNo">686</span><a name="line.686"></a>
-<span class="sourceLineNo">687</span>    public byte[] get(Object key) {<a name="line.687"></a>
-<span class="sourceLineNo">688</span>      return map.get(key);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>    }<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>    public int hashCode() {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      return map.hashCode();<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span><a name="line.694"></a>
-<span class="sourceLineNo">695</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>      return this.map.headMap(toKey);<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    }<a name="line.697"></a>
-<span class="sourceLineNo">698</span><a name="line.698"></a>
-<span class="sourceLineNo">699</span>    public boolean isEmpty() {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return map.isEmpty();<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      return map.keySet();<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>    public byte[] lastKey() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span>      return map.lastKey();<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    }<a name="line.709"></a>
-<span class="sourceLineNo">710</span><a name="line.710"></a>
-<span class="sourceLineNo">711</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      return this.map.put(key, value);<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    }<a name="line.713"></a>
-<span class="sourceLineNo">714</span><a name="line.714"></a>
-<span class="sourceLineNo">715</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.map.putAll(m);<a name="line.716"></a>
-<span class="sourceLineNo">717</span>    }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public byte[] remove(Object key) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return this.map.remove(key);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    public int size() {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>      return map.size();<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    }<a name="line.725"></a>
-<span class="sourceLineNo">726</span><a name="line.726"></a>
-<span class="sourceLineNo">727</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      return this.map.subMap(fromKey, toKey);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.731"></a>
-<span class="sourceLineNo">732</span>      return this.map.tailMap(fromKey);<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    }<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>    public Collection&lt;byte[]&gt; values() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      return map.values();<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>    /**<a name="line.739"></a>
-<span class="sourceLineNo">740</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.740"></a>
-<span class="sourceLineNo">741</span>     * We write it as a protobuf.<a name="line.741"></a>
-<span class="sourceLineNo">742</span>     * @param out<a name="line.742"></a>
-<span class="sourceLineNo">743</span>     * @throws IOException<a name="line.743"></a>
-<span class="sourceLineNo">744</span>     * @see #read(DataInputStream)<a name="line.744"></a>
-<span class="sourceLineNo">745</span>     */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    void write(final DataOutputStream out) throws IOException {<a name="line.746"></a>
-<span class="sourceLineNo">747</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.747"></a>
-<span class="sourceLineNo">748</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.749"></a>
-<span class="sourceLineNo">750</span>        bbpBuilder.setFirst(ByteStringer.wrap(e.getKey()));<a name="line.750"></a>
-<span class="sourceLineNo">751</span>        bbpBuilder.setSecond(ByteStringer.wrap(e.getValue()));<a name="line.751"></a>
-<span class="sourceLineNo">752</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      }<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      builder.build().writeDelimitedTo(out);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    /**<a name="line.758"></a>
-<span class="sourceLineNo">759</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.759"></a>
-<span class="sourceLineNo">760</span>     * Can deserialize protobuf of old Writables format.<a name="line.760"></a>
-<span class="sourceLineNo">761</span>     * @param in<a name="line.761"></a>
-<span class="sourceLineNo">762</span>     * @throws IOException<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * @see #write(DataOutputStream)<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    void read(final DataInputStream in) throws IOException {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      byte [] pbuf = new byte[pblen];<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      if (in.markSupported()) in.mark(pblen);<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      int read = in.read(pbuf);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      } else {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>        if (in.markSupported()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>          in.reset();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>          parseWritable(in);<a name="line.777"></a>
-<span class="sourceLineNo">778</span>        } else {<a name="line.778"></a>
-<span class="sourceLineNo">779</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.779"></a>
-<span class="sourceLineNo">780</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.781"></a>
-<span class="sourceLineNo">782</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.782"></a>
-<span class="sourceLineNo">783</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.783"></a>
-<span class="sourceLineNo">784</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.784"></a>
-<span class="sourceLineNo">785</span>          parseWritable(new DataInputStream(sis));<a name="line.785"></a>
-<span class="sourceLineNo">786</span>        }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>      }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    }<a name="line.788"></a>
-<span class="sourceLineNo">789</span><a name="line.789"></a>
-<span class="sourceLineNo">790</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.790"></a>
-<span class="sourceLineNo">791</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.791"></a>
-<span class="sourceLineNo">792</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.792"></a>
-<span class="sourceLineNo">793</span>     * @throws IOException<a name="line.793"></a>
-<span class="sourceLineNo">794</span>     */<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      this.map.clear();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>      // Read the number of entries in the map<a name="line.798"></a>
-<span class="sourceLineNo">799</span>      int entries = in.readInt();<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      // Then read each key/value pair<a name="line.800"></a>
-<span class="sourceLineNo">801</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        byte [] key = Bytes.readByteArray(in);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        in.readByte();<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        byte [] value = Bytes.readByteArray(in);<a name="line.805"></a>
-<span class="sourceLineNo">806</span>        this.map.put(key, value);<a name="line.806"></a>
-<span class="sourceLineNo">807</span>      }<a name="line.807"></a>
-<span class="sourceLineNo">808</span>    }<a name="line.808"></a>
-<span class="sourceLineNo">809</span><a name="line.809"></a>
-<span class="sourceLineNo">810</span>    /**<a name="line.810"></a>
-<span class="sourceLineNo">811</span>     * Fill our map with content of the pb we read off disk<a name="line.811"></a>
-<span class="sourceLineNo">812</span>     * @param fip protobuf message to read<a name="line.812"></a>
-<span class="sourceLineNo">813</span>     */<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      this.map.clear();<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      }<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    }<a name="line.819"></a>
-<span class="sourceLineNo">820</span>  }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.822"></a>
-<span class="sourceLineNo">823</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.824"></a>
-<span class="sourceLineNo">825</span>  }<a name="line.825"></a>
-<span class="sourceLineNo">826</span><a name="line.826"></a>
-<span class="sourceLineNo">827</span>  /**<a name="line.827"></a>
-<span class="sourceLineNo">828</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * HFile.Writer.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   *<a name="line.830"></a>
-<span class="sourceLineNo">831</span>   * @return Array of strings, each represents a supported compression<a name="line.831"></a>
-<span class="sourceLineNo">832</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.832"></a>
-<span class="sourceLineNo">833</span>   *         supported.<a name="line.833"></a>
-<span class="sourceLineNo">834</span>   *         &lt;ul&gt;<a name="line.834"></a>
-<span class="sourceLineNo">835</span>   *         &lt;li&gt;"none" - No compression.<a name="line.835"></a>
-<span class="sourceLineNo">836</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.836"></a>
-<span class="sourceLineNo">837</span>   *         &lt;/ul&gt;<a name="line.837"></a>
-<span class="sourceLineNo">838</span>   */<a name="line.838"></a>
-<span class="sourceLineNo">839</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>    return Compression.getSupportedAlgorithms();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
-<span class="sourceLineNo">842</span><a name="line.842"></a>
-<span class="sourceLineNo">843</span>  // Utility methods.<a name="line.843"></a>
-<span class="sourceLineNo">844</span>  /*<a name="line.844"></a>
-<span class="sourceLineNo">845</span>   * @param l Long to convert to an int.<a name="line.845"></a>
-<span class="sourceLineNo">846</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.846"></a>
-<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
-<span class="sourceLineNo">848</span>  static int longToInt(final long l) {<a name="line.848"></a>
-<span class="sourceLineNo">849</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.851"></a>
-<span class="sourceLineNo">852</span>  }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>  /**<a name="line.854"></a>
-<span class="sourceLineNo">855</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.855"></a>
-<span class="sourceLineNo">856</span>   * empty list.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>   *<a name="line.857"></a>
-<span class="sourceLineNo">858</span>   * @param fs  The file system reference.<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * @param regionDir  The region directory to scan.<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * @return The list of files found.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * @throws IOException When scanning the files fails.<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   */<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      throws IOException {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;Path&gt;();<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    for(FileStatus dir : familyDirs) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.869"></a>
-<span class="sourceLineNo">870</span>      for (FileStatus file : files) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        if (!file.isDirectory() &amp;&amp;<a name="line.871"></a>
-<span class="sourceLineNo">872</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          regionHFiles.add(file.getPath());<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>      }<a name="line.876"></a>
-<span class="sourceLineNo">877</span>    }<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    return regionHFiles;<a name="line.878"></a>
-<span class="sourceLineNo">879</span>  }<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>  /**<a name="line.881"></a>
-<span class="sourceLineNo">882</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.882"></a>
-<span class="sourceLineNo">883</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.883"></a>
-<span class="sourceLineNo">884</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.884"></a>
-<span class="sourceLineNo">885</span>   * indicate that this is not a software error, but corrupted input.<a name="line.885"></a>
-<span class="sourceLineNo">886</span>   *<a name="line.886"></a>
-<span class="sourceLineNo">887</span>   * @param version an HFile version<a name="line.887"></a>
-<span class="sourceLineNo">888</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.888"></a>
-<span class="sourceLineNo">889</span>   */<a name="line.889"></a>
-<span class="sourceLineNo">890</span>  public static void checkFormatVersion(int version)<a name="line.890"></a>
-<span class="sourceLineNo">891</span>      throws IllegalArgumentException {<a name="line.891"></a>
-<span class="sourceLineNo">892</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.893"></a>
-<span class="sourceLineNo">894</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.894"></a>
-<span class="sourceLineNo">895</span>          + MAX_FORMAT_VERSION + ")");<a name="line.895"></a>
-<span class="sourceLineNo">896</span>    }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>  }<a name="line.897"></a>
-<span class="sourceLineNo">898</span><a name="line.898"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>    public Writer create() throws IOException {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      if ((path != null ? 1 : 0) + (ostream != null ? 1 : 0) != 1) {<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        throw new AssertionError("Please specify exactly one of " +<a name="line.303"></a>
+<span class="sourceLineNo">304</span>            "filesystem/path or path");<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      if (path != null) {<a name="line.306"></a>
+<span class="sourceLineNo">307</span>        ostream = HFileWriterImpl.createOutputStream(conf, fs, path, favoredNodes);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        try {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>          ostream.setDropBehind(shouldDropBehind &amp;&amp; cacheConf.shouldDropBehindCompaction());<a name="line.309"></a>
+<span class="sourceLineNo">310</span>        } catch (UnsupportedOperationException uoe) {<a name="line.310"></a>
+<span class="sourceLineNo">311</span>          if (LOG.isTraceEnabled()) LOG.trace("Unable to set drop behind on " + path, uoe);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>          else if (LOG.isDebugEnabled()) LOG.debug("Unable to set drop behind on " + path);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        }<a name="line.313"></a>
+<span class="sourceLineNo">314</span>      }<a name="line.314"></a>
+<span class="sourceLineNo">315</span>      return new HFileWriterImpl(conf, cacheConf, path, ostream, comparator, fileContext);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    }<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  /** The configuration key for HFile version to use for new files */<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  public static final String FORMAT_VERSION_KEY = "hfile.format.version";<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>  public static int getFormatVersion(Configuration conf) {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    int version = conf.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    checkFormatVersion(version);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return version;<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Returns the factory to be used to create {@link HFile} writers.<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * Disables block cache access for all writers created through the<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * returned factory.<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   */<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.333"></a>
+<span class="sourceLineNo">334</span>       conf) {<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    Configuration tempConf = new Configuration(conf);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  }<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  /**<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   */<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.343"></a>
+<span class="sourceLineNo">344</span>      CacheConfig cacheConf) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    int version = getFormatVersion(conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    switch (version) {<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    case 2:<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.350"></a>
+<span class="sourceLineNo">351</span>        "in hbase-site.xml)");<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    case 3:<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    default:<a name="line.354"></a>
+<span class="sourceLineNo">355</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.355"></a>
+<span class="sourceLineNo">356</span>          "format version " + version);<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  /**<a name="line.360"></a>
+<span class="sourceLineNo">361</span>   * An abstraction used by the block index.<a name="line.361"></a>
+<span class="sourceLineNo">362</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public interface CachingBlockReader {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>    /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>     * Read in a file block.<a name="line.367"></a>
+<span class="sourceLineNo">368</span>     * @param offset offset to read.<a name="line.368"></a>
+<span class="sourceLineNo">369</span>     * @param onDiskBlockSize size of the block<a name="line.369"></a>
+<span class="sourceLineNo">370</span>     * @param cacheBlock<a name="line.370"></a>
+<span class="sourceLineNo">371</span>     * @param pread<a name="line.371"></a>
+<span class="sourceLineNo">372</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.372"></a>
+<span class="sourceLineNo">373</span> 

<TRUNCATED>

[13/51] [partial] hbase-site git commit: Published site at 7dabcf23e8dd53f563981e1e03f82336fc0a44da.

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html
index 45ec6b4..f359559 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html
@@ -47,162 +47,163 @@
 <span class="sourceLineNo">039</span> */<a name="line.39"></a>
 <span class="sourceLineNo">040</span>@InterfaceAudience.Private<a name="line.40"></a>
 <span class="sourceLineNo">041</span>public interface DataBlockEncoder {<a name="line.41"></a>
-<span class="sourceLineNo">042</span><a name="line.42"></a>
-<span class="sourceLineNo">043</span>  /**<a name="line.43"></a>
-<span class="sourceLineNo">044</span>   * Starts encoding for a block of KeyValues. Call<a name="line.44"></a>
-<span class="sourceLineNo">045</span>   * {@link #endBlockEncoding(HFileBlockEncodingContext, DataOutputStream, byte[])} to finish<a name="line.45"></a>
-<span class="sourceLineNo">046</span>   * encoding of a block.<a name="line.46"></a>
-<span class="sourceLineNo">047</span>   * @param encodingCtx<a name="line.47"></a>
-<span class="sourceLineNo">048</span>   * @param out<a name="line.48"></a>
-<span class="sourceLineNo">049</span>   * @throws IOException<a name="line.49"></a>
-<span class="sourceLineNo">050</span>   */<a name="line.50"></a>
-<span class="sourceLineNo">051</span>  void startBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out)<a name="line.51"></a>
-<span class="sourceLineNo">052</span>      throws IOException;<a name="line.52"></a>
-<span class="sourceLineNo">053</span><a name="line.53"></a>
-<span class="sourceLineNo">054</span>  /**<a name="line.54"></a>
-<span class="sourceLineNo">055</span>   * Encodes a KeyValue.<a name="line.55"></a>
-<span class="sourceLineNo">056</span>   * @param cell<a name="line.56"></a>
-<span class="sourceLineNo">057</span>   * @param encodingCtx<a name="line.57"></a>
-<span class="sourceLineNo">058</span>   * @param out<a name="line.58"></a>
-<span class="sourceLineNo">059</span>   * @return unencoded kv size written<a name="line.59"></a>
-<span class="sourceLineNo">060</span>   * @throws IOException<a name="line.60"></a>
-<span class="sourceLineNo">061</span>   */<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)<a name="line.62"></a>
-<span class="sourceLineNo">063</span>      throws IOException;<a name="line.63"></a>
-<span class="sourceLineNo">064</span><a name="line.64"></a>
-<span class="sourceLineNo">065</span>  /**<a name="line.65"></a>
-<span class="sourceLineNo">066</span>   * Ends encoding for a block of KeyValues. Gives a chance for the encoder to do the finishing<a name="line.66"></a>
-<span class="sourceLineNo">067</span>   * stuff for the encoded block. It must be called at the end of block encoding.<a name="line.67"></a>
-<span class="sourceLineNo">068</span>   * @param encodingCtx<a name="line.68"></a>
-<span class="sourceLineNo">069</span>   * @param out<a name="line.69"></a>
-<span class="sourceLineNo">070</span>   * @param uncompressedBytesWithHeader<a name="line.70"></a>
-<span class="sourceLineNo">071</span>   * @throws IOException<a name="line.71"></a>
-<span class="sourceLineNo">072</span>   */<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      byte[] uncompressedBytesWithHeader) throws IOException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span><a name="line.75"></a>
-<span class="sourceLineNo">076</span>  /**<a name="line.76"></a>
-<span class="sourceLineNo">077</span>   * Decode.<a name="line.77"></a>
-<span class="sourceLineNo">078</span>   * @param source Compressed stream of KeyValues.<a name="line.78"></a>
-<span class="sourceLineNo">079</span>   * @param decodingCtx<a name="line.79"></a>
-<span class="sourceLineNo">080</span>   * @return Uncompressed block of KeyValues.<a name="line.80"></a>
-<span class="sourceLineNo">081</span>   * @throws IOException If there is an error in source.<a name="line.81"></a>
-<span class="sourceLineNo">082</span>   */<a name="line.82"></a>
-<span class="sourceLineNo">083</span>  ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext decodingCtx)<a name="line.83"></a>
-<span class="sourceLineNo">084</span>      throws IOException;<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  /**<a name="line.86"></a>
-<span class="sourceLineNo">087</span>   * Return first key in block as a cell. Useful for indexing. Typically does not make<a name="line.87"></a>
-<span class="sourceLineNo">088</span>   * a deep copy but returns a buffer wrapping a segment of the actual block's<a name="line.88"></a>
-<span class="sourceLineNo">089</span>   * byte array. This is because the first key in block is usually stored<a name="line.89"></a>
-<span class="sourceLineNo">090</span>   * unencoded.<a name="line.90"></a>
-<span class="sourceLineNo">091</span>   * @param block encoded block we want index, the position will not change<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * @return First key in block as a cell.<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   */<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  Cell getFirstKeyCellInBlock(ByteBuff block);<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  /**<a name="line.96"></a>
-<span class="sourceLineNo">097</span>   * Create a HFileBlock seeker which find KeyValues within a block.<a name="line.97"></a>
-<span class="sourceLineNo">098</span>   * @param comparator what kind of comparison should be used<a name="line.98"></a>
-<span class="sourceLineNo">099</span>   * @param decodingCtx<a name="line.99"></a>
-<span class="sourceLineNo">100</span>   * @return A newly created seeker.<a name="line.100"></a>
-<span class="sourceLineNo">101</span>   */<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  EncodedSeeker createSeeker(CellComparator comparator, <a name="line.102"></a>
-<span class="sourceLineNo">103</span>      HFileBlockDecodingContext decodingCtx);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  /**<a name="line.105"></a>
-<span class="sourceLineNo">106</span>   * Creates a encoder specific encoding context<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   *<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * @param encoding<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   *          encoding strategy used<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * @param headerBytes<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   *          header bytes to be written, put a dummy header here if the header<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   *          is unknown<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   * @param meta<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   *          HFile meta data<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   * @return a newly created encoding context<a name="line.115"></a>
-<span class="sourceLineNo">116</span>   */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  HFileBlockEncodingContext newDataBlockEncodingContext(<a name="line.117"></a>
-<span class="sourceLineNo">118</span>      DataBlockEncoding encoding, byte[] headerBytes, HFileContext meta);<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  /**<a name="line.120"></a>
-<span class="sourceLineNo">121</span>   * Creates an encoder specific decoding context, which will prepare the data<a name="line.121"></a>
-<span class="sourceLineNo">122</span>   * before actual decoding<a name="line.122"></a>
-<span class="sourceLineNo">123</span>   *<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   * @param meta<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   *          HFile meta data        <a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * @return a newly created decoding context<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta);<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * An interface which enable to seek while underlying data is encoded.<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   *<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * It works on one HFileBlock, but it is reusable. See<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * {@link #setCurrentBuffer(ByteBuff)}.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   */<a name="line.135"></a>
-<span class="sourceLineNo">136</span>  interface EncodedSeeker {<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    /**<a name="line.137"></a>
-<span class="sourceLineNo">138</span>     * Set on which buffer there will be done seeking.<a name="line.138"></a>
-<span class="sourceLineNo">139</span>     * @param buffer Used for seeking.<a name="line.139"></a>
-<span class="sourceLineNo">140</span>     */<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    void setCurrentBuffer(ByteBuff buffer);<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>    /**<a name="line.143"></a>
-<span class="sourceLineNo">144</span>     * From the current position creates a cell using the key part<a name="line.144"></a>
-<span class="sourceLineNo">145</span>     * of the current buffer<a name="line.145"></a>
-<span class="sourceLineNo">146</span>     * @return key at current position<a name="line.146"></a>
-<span class="sourceLineNo">147</span>     */<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    Cell getKey();<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    /**<a name="line.150"></a>
-<span class="sourceLineNo">151</span>     * Does a shallow copy of the value at the current position. A shallow<a name="line.151"></a>
-<span class="sourceLineNo">152</span>     * copy is possible because the returned buffer refers to the backing array<a name="line.152"></a>
-<span class="sourceLineNo">153</span>     * of the original encoded buffer.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>     * @return value at current position<a name="line.154"></a>
-<span class="sourceLineNo">155</span>     */<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    ByteBuffer getValueShallowCopy();<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>    /**<a name="line.158"></a>
-<span class="sourceLineNo">159</span>     * @return the Cell at the current position. Includes memstore timestamp.<a name="line.159"></a>
-<span class="sourceLineNo">160</span>     */<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    Cell getCell();<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>    /** Set position to beginning of given block */<a name="line.163"></a>
-<span class="sourceLineNo">164</span>    void rewind();<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>    /**<a name="line.166"></a>
-<span class="sourceLineNo">167</span>     * Move to next position<a name="line.167"></a>
-<span class="sourceLineNo">168</span>     * @return true on success, false if there is no more positions.<a name="line.168"></a>
-<span class="sourceLineNo">169</span>     */<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    boolean next();<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>    /**<a name="line.172"></a>
-<span class="sourceLineNo">173</span>     * Moves the seeker position within the current block to:<a name="line.173"></a>
-<span class="sourceLineNo">174</span>     * &lt;ul&gt;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>     * &lt;li&gt;the last key that that is less than or equal to the given key if<a name="line.175"></a>
-<span class="sourceLineNo">176</span>     * &lt;code&gt;seekBefore&lt;/code&gt; is false&lt;/li&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>     * &lt;li&gt;the last key that is strictly less than the given key if &lt;code&gt;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>     * seekBefore&lt;/code&gt; is true. The caller is responsible for loading the<a name="line.178"></a>
-<span class="sourceLineNo">179</span>     * previous block if the requested key turns out to be the first key of the<a name="line.179"></a>
-<span class="sourceLineNo">180</span>     * current block.&lt;/li&gt;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>     * &lt;/ul&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span>     * @param key - Cell to which the seek should happen<a name="line.182"></a>
-<span class="sourceLineNo">183</span>     * @param seekBefore find the key strictly less than the given key in case<a name="line.183"></a>
-<span class="sourceLineNo">184</span>     *          of an exact match. Does not matter in case of an inexact match.<a name="line.184"></a>
-<span class="sourceLineNo">185</span>     * @return 0 on exact match, 1 on inexact match.<a name="line.185"></a>
-<span class="sourceLineNo">186</span>     */<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    int seekToKeyInBlock(Cell key, boolean seekBefore);<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>    /**<a name="line.189"></a>
-<span class="sourceLineNo">190</span>     * Compare the given key against the current key<a name="line.190"></a>
-<span class="sourceLineNo">191</span>     * @param comparator<a name="line.191"></a>
-<span class="sourceLineNo">192</span>     * @param key<a name="line.192"></a>
-<span class="sourceLineNo">193</span>     * @return -1 is the passed key is smaller than the current key, 0 if equal and 1 if greater<a name="line.193"></a>
-<span class="sourceLineNo">194</span>     */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    public int compareKey(CellComparator comparator, Cell key);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
-<span class="sourceLineNo">197</span>}<a name="line.197"></a>
+<span class="sourceLineNo">042</span>// TODO: This Interface should be deprecated and replaced. It presumes hfile and carnal knowledge of<a name="line.42"></a>
+<span class="sourceLineNo">043</span>// Cell internals. It was done for a different time. Remove. Purge.<a name="line.43"></a>
+<span class="sourceLineNo">044</span>  /**<a name="line.44"></a>
+<span class="sourceLineNo">045</span>   * Starts encoding for a block of KeyValues. Call<a name="line.45"></a>
+<span class="sourceLineNo">046</span>   * {@link #endBlockEncoding(HFileBlockEncodingContext, DataOutputStream, byte[])} to finish<a name="line.46"></a>
+<span class="sourceLineNo">047</span>   * encoding of a block.<a name="line.47"></a>
+<span class="sourceLineNo">048</span>   * @param encodingCtx<a name="line.48"></a>
+<span class="sourceLineNo">049</span>   * @param out<a name="line.49"></a>
+<span class="sourceLineNo">050</span>   * @throws IOException<a name="line.50"></a>
+<span class="sourceLineNo">051</span>   */<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  void startBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out)<a name="line.52"></a>
+<span class="sourceLineNo">053</span>      throws IOException;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>  /**<a name="line.55"></a>
+<span class="sourceLineNo">056</span>   * Encodes a KeyValue.<a name="line.56"></a>
+<span class="sourceLineNo">057</span>   * @param cell<a name="line.57"></a>
+<span class="sourceLineNo">058</span>   * @param encodingCtx<a name="line.58"></a>
+<span class="sourceLineNo">059</span>   * @param out<a name="line.59"></a>
+<span class="sourceLineNo">060</span>   * @return unencoded kv size written<a name="line.60"></a>
+<span class="sourceLineNo">061</span>   * @throws IOException<a name="line.61"></a>
+<span class="sourceLineNo">062</span>   */<a name="line.62"></a>
+<span class="sourceLineNo">063</span>  int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)<a name="line.63"></a>
+<span class="sourceLineNo">064</span>      throws IOException;<a name="line.64"></a>
+<span class="sourceLineNo">065</span><a name="line.65"></a>
+<span class="sourceLineNo">066</span>  /**<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   * Ends encoding for a block of KeyValues. Gives a chance for the encoder to do the finishing<a name="line.67"></a>
+<span class="sourceLineNo">068</span>   * stuff for the encoded block. It must be called at the end of block encoding.<a name="line.68"></a>
+<span class="sourceLineNo">069</span>   * @param encodingCtx<a name="line.69"></a>
+<span class="sourceLineNo">070</span>   * @param out<a name="line.70"></a>
+<span class="sourceLineNo">071</span>   * @param uncompressedBytesWithHeader<a name="line.71"></a>
+<span class="sourceLineNo">072</span>   * @throws IOException<a name="line.72"></a>
+<span class="sourceLineNo">073</span>   */<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,<a name="line.74"></a>
+<span class="sourceLineNo">075</span>      byte[] uncompressedBytesWithHeader) throws IOException;<a name="line.75"></a>
+<span class="sourceLineNo">076</span><a name="line.76"></a>
+<span class="sourceLineNo">077</span>  /**<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   * Decode.<a name="line.78"></a>
+<span class="sourceLineNo">079</span>   * @param source Compressed stream of KeyValues.<a name="line.79"></a>
+<span class="sourceLineNo">080</span>   * @param decodingCtx<a name="line.80"></a>
+<span class="sourceLineNo">081</span>   * @return Uncompressed block of KeyValues.<a name="line.81"></a>
+<span class="sourceLineNo">082</span>   * @throws IOException If there is an error in source.<a name="line.82"></a>
+<span class="sourceLineNo">083</span>   */<a name="line.83"></a>
+<span class="sourceLineNo">084</span>  ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext decodingCtx)<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      throws IOException;<a name="line.85"></a>
+<span class="sourceLineNo">086</span><a name="line.86"></a>
+<span class="sourceLineNo">087</span>  /**<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   * Return first key in block as a cell. Useful for indexing. Typically does not make<a name="line.88"></a>
+<span class="sourceLineNo">089</span>   * a deep copy but returns a buffer wrapping a segment of the actual block's<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   * byte array. This is because the first key in block is usually stored<a name="line.90"></a>
+<span class="sourceLineNo">091</span>   * unencoded.<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   * @param block encoded block we want index, the position will not change<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   * @return First key in block as a cell.<a name="line.93"></a>
+<span class="sourceLineNo">094</span>   */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  Cell getFirstKeyCellInBlock(ByteBuff block);<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>  /**<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * Create a HFileBlock seeker which find KeyValues within a block.<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   * @param comparator what kind of comparison should be used<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   * @param decodingCtx<a name="line.100"></a>
+<span class="sourceLineNo">101</span>   * @return A newly created seeker.<a name="line.101"></a>
+<span class="sourceLineNo">102</span>   */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  EncodedSeeker createSeeker(CellComparator comparator, <a name="line.103"></a>
+<span class="sourceLineNo">104</span>      HFileBlockDecodingContext decodingCtx);<a name="line.104"></a>
+<span class="sourceLineNo">105</span><a name="line.105"></a>
+<span class="sourceLineNo">106</span>  /**<a name="line.106"></a>
+<span class="sourceLineNo">107</span>   * Creates a encoder specific encoding context<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   *<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * @param encoding<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   *          encoding strategy used<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   * @param headerBytes<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   *          header bytes to be written, put a dummy header here if the header<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   *          is unknown<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * @param meta<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   *          HFile meta data<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   * @return a newly created encoding context<a name="line.116"></a>
+<span class="sourceLineNo">117</span>   */<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  HFileBlockEncodingContext newDataBlockEncodingContext(<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      DataBlockEncoding encoding, byte[] headerBytes, HFileContext meta);<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  /**<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * Creates an encoder specific decoding context, which will prepare the data<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * before actual decoding<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   *<a name="line.124"></a>
+<span class="sourceLineNo">125</span>   * @param meta<a name="line.125"></a>
+<span class="sourceLineNo">126</span>   *          HFile meta data        <a name="line.126"></a>
+<span class="sourceLineNo">127</span>   * @return a newly created decoding context<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   */<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta);<a name="line.129"></a>
+<span class="sourceLineNo">130</span><a name="line.130"></a>
+<span class="sourceLineNo">131</span>  /**<a name="line.131"></a>
+<span class="sourceLineNo">132</span>   * An interface which enable to seek while underlying data is encoded.<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   *<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * It works on one HFileBlock, but it is reusable. See<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * {@link #setCurrentBuffer(ByteBuff)}.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   */<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  interface EncodedSeeker {<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    /**<a name="line.138"></a>
+<span class="sourceLineNo">139</span>     * Set on which buffer there will be done seeking.<a name="line.139"></a>
+<span class="sourceLineNo">140</span>     * @param buffer Used for seeking.<a name="line.140"></a>
+<span class="sourceLineNo">141</span>     */<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    void setCurrentBuffer(ByteBuff buffer);<a name="line.142"></a>
+<span class="sourceLineNo">143</span><a name="line.143"></a>
+<span class="sourceLineNo">144</span>    /**<a name="line.144"></a>
+<span class="sourceLineNo">145</span>     * From the current position creates a cell using the key part<a name="line.145"></a>
+<span class="sourceLineNo">146</span>     * of the current buffer<a name="line.146"></a>
+<span class="sourceLineNo">147</span>     * @return key at current position<a name="line.147"></a>
+<span class="sourceLineNo">148</span>     */<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    Cell getKey();<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>    /**<a name="line.151"></a>
+<span class="sourceLineNo">152</span>     * Does a shallow copy of the value at the current position. A shallow<a name="line.152"></a>
+<span class="sourceLineNo">153</span>     * copy is possible because the returned buffer refers to the backing array<a name="line.153"></a>
+<span class="sourceLineNo">154</span>     * of the original encoded buffer.<a name="line.154"></a>
+<span class="sourceLineNo">155</span>     * @return value at current position<a name="line.155"></a>
+<span class="sourceLineNo">156</span>     */<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    ByteBuffer getValueShallowCopy();<a name="line.157"></a>
+<span class="sourceLineNo">158</span><a name="line.158"></a>
+<span class="sourceLineNo">159</span>    /**<a name="line.159"></a>
+<span class="sourceLineNo">160</span>     * @return the Cell at the current position. Includes memstore timestamp.<a name="line.160"></a>
+<span class="sourceLineNo">161</span>     */<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    Cell getCell();<a name="line.162"></a>
+<span class="sourceLineNo">163</span><a name="line.163"></a>
+<span class="sourceLineNo">164</span>    /** Set position to beginning of given block */<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    void rewind();<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    /**<a name="line.167"></a>
+<span class="sourceLineNo">168</span>     * Move to next position<a name="line.168"></a>
+<span class="sourceLineNo">169</span>     * @return true on success, false if there is no more positions.<a name="line.169"></a>
+<span class="sourceLineNo">170</span>     */<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    boolean next();<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>    /**<a name="line.173"></a>
+<span class="sourceLineNo">174</span>     * Moves the seeker position within the current block to:<a name="line.174"></a>
+<span class="sourceLineNo">175</span>     * &lt;ul&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>     * &lt;li&gt;the last key that that is less than or equal to the given key if<a name="line.176"></a>
+<span class="sourceLineNo">177</span>     * &lt;code&gt;seekBefore&lt;/code&gt; is false&lt;/li&gt;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>     * &lt;li&gt;the last key that is strictly less than the given key if &lt;code&gt;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>     * seekBefore&lt;/code&gt; is true. The caller is responsible for loading the<a name="line.179"></a>
+<span class="sourceLineNo">180</span>     * previous block if the requested key turns out to be the first key of the<a name="line.180"></a>
+<span class="sourceLineNo">181</span>     * current block.&lt;/li&gt;<a name="line.181"></a>
+<span class="sourceLineNo">182</span>     * &lt;/ul&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>     * @param key - Cell to which the seek should happen<a name="line.183"></a>
+<span class="sourceLineNo">184</span>     * @param seekBefore find the key strictly less than the given key in case<a name="line.184"></a>
+<span class="sourceLineNo">185</span>     *          of an exact match. Does not matter in case of an inexact match.<a name="line.185"></a>
+<span class="sourceLineNo">186</span>     * @return 0 on exact match, 1 on inexact match.<a name="line.186"></a>
+<span class="sourceLineNo">187</span>     */<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    int seekToKeyInBlock(Cell key, boolean seekBefore);<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span>    /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>     * Compare the given key against the current key<a name="line.191"></a>
+<span class="sourceLineNo">192</span>     * @param comparator<a name="line.192"></a>
+<span class="sourceLineNo">193</span>     * @param key<a name="line.193"></a>
+<span class="sourceLineNo">194</span>     * @return -1 is the passed key is smaller than the current key, 0 if equal and 1 if greater<a name="line.194"></a>
+<span class="sourceLineNo">195</span>     */<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    public int compareKey(CellComparator comparator, Cell key);<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span>}<a name="line.198"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/3e48e84d/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html
index 45ec6b4..f359559 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.html
@@ -47,162 +47,163 @@
 <span class="sourceLineNo">039</span> */<a name="line.39"></a>
 <span class="sourceLineNo">040</span>@InterfaceAudience.Private<a name="line.40"></a>
 <span class="sourceLineNo">041</span>public interface DataBlockEncoder {<a name="line.41"></a>
-<span class="sourceLineNo">042</span><a name="line.42"></a>
-<span class="sourceLineNo">043</span>  /**<a name="line.43"></a>
-<span class="sourceLineNo">044</span>   * Starts encoding for a block of KeyValues. Call<a name="line.44"></a>
-<span class="sourceLineNo">045</span>   * {@link #endBlockEncoding(HFileBlockEncodingContext, DataOutputStream, byte[])} to finish<a name="line.45"></a>
-<span class="sourceLineNo">046</span>   * encoding of a block.<a name="line.46"></a>
-<span class="sourceLineNo">047</span>   * @param encodingCtx<a name="line.47"></a>
-<span class="sourceLineNo">048</span>   * @param out<a name="line.48"></a>
-<span class="sourceLineNo">049</span>   * @throws IOException<a name="line.49"></a>
-<span class="sourceLineNo">050</span>   */<a name="line.50"></a>
-<span class="sourceLineNo">051</span>  void startBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out)<a name="line.51"></a>
-<span class="sourceLineNo">052</span>      throws IOException;<a name="line.52"></a>
-<span class="sourceLineNo">053</span><a name="line.53"></a>
-<span class="sourceLineNo">054</span>  /**<a name="line.54"></a>
-<span class="sourceLineNo">055</span>   * Encodes a KeyValue.<a name="line.55"></a>
-<span class="sourceLineNo">056</span>   * @param cell<a name="line.56"></a>
-<span class="sourceLineNo">057</span>   * @param encodingCtx<a name="line.57"></a>
-<span class="sourceLineNo">058</span>   * @param out<a name="line.58"></a>
-<span class="sourceLineNo">059</span>   * @return unencoded kv size written<a name="line.59"></a>
-<span class="sourceLineNo">060</span>   * @throws IOException<a name="line.60"></a>
-<span class="sourceLineNo">061</span>   */<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)<a name="line.62"></a>
-<span class="sourceLineNo">063</span>      throws IOException;<a name="line.63"></a>
-<span class="sourceLineNo">064</span><a name="line.64"></a>
-<span class="sourceLineNo">065</span>  /**<a name="line.65"></a>
-<span class="sourceLineNo">066</span>   * Ends encoding for a block of KeyValues. Gives a chance for the encoder to do the finishing<a name="line.66"></a>
-<span class="sourceLineNo">067</span>   * stuff for the encoded block. It must be called at the end of block encoding.<a name="line.67"></a>
-<span class="sourceLineNo">068</span>   * @param encodingCtx<a name="line.68"></a>
-<span class="sourceLineNo">069</span>   * @param out<a name="line.69"></a>
-<span class="sourceLineNo">070</span>   * @param uncompressedBytesWithHeader<a name="line.70"></a>
-<span class="sourceLineNo">071</span>   * @throws IOException<a name="line.71"></a>
-<span class="sourceLineNo">072</span>   */<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      byte[] uncompressedBytesWithHeader) throws IOException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span><a name="line.75"></a>
-<span class="sourceLineNo">076</span>  /**<a name="line.76"></a>
-<span class="sourceLineNo">077</span>   * Decode.<a name="line.77"></a>
-<span class="sourceLineNo">078</span>   * @param source Compressed stream of KeyValues.<a name="line.78"></a>
-<span class="sourceLineNo">079</span>   * @param decodingCtx<a name="line.79"></a>
-<span class="sourceLineNo">080</span>   * @return Uncompressed block of KeyValues.<a name="line.80"></a>
-<span class="sourceLineNo">081</span>   * @throws IOException If there is an error in source.<a name="line.81"></a>
-<span class="sourceLineNo">082</span>   */<a name="line.82"></a>
-<span class="sourceLineNo">083</span>  ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext decodingCtx)<a name="line.83"></a>
-<span class="sourceLineNo">084</span>      throws IOException;<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  /**<a name="line.86"></a>
-<span class="sourceLineNo">087</span>   * Return first key in block as a cell. Useful for indexing. Typically does not make<a name="line.87"></a>
-<span class="sourceLineNo">088</span>   * a deep copy but returns a buffer wrapping a segment of the actual block's<a name="line.88"></a>
-<span class="sourceLineNo">089</span>   * byte array. This is because the first key in block is usually stored<a name="line.89"></a>
-<span class="sourceLineNo">090</span>   * unencoded.<a name="line.90"></a>
-<span class="sourceLineNo">091</span>   * @param block encoded block we want index, the position will not change<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * @return First key in block as a cell.<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   */<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  Cell getFirstKeyCellInBlock(ByteBuff block);<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  /**<a name="line.96"></a>
-<span class="sourceLineNo">097</span>   * Create a HFileBlock seeker which find KeyValues within a block.<a name="line.97"></a>
-<span class="sourceLineNo">098</span>   * @param comparator what kind of comparison should be used<a name="line.98"></a>
-<span class="sourceLineNo">099</span>   * @param decodingCtx<a name="line.99"></a>
-<span class="sourceLineNo">100</span>   * @return A newly created seeker.<a name="line.100"></a>
-<span class="sourceLineNo">101</span>   */<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  EncodedSeeker createSeeker(CellComparator comparator, <a name="line.102"></a>
-<span class="sourceLineNo">103</span>      HFileBlockDecodingContext decodingCtx);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  /**<a name="line.105"></a>
-<span class="sourceLineNo">106</span>   * Creates a encoder specific encoding context<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   *<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * @param encoding<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   *          encoding strategy used<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * @param headerBytes<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   *          header bytes to be written, put a dummy header here if the header<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   *          is unknown<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   * @param meta<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   *          HFile meta data<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   * @return a newly created encoding context<a name="line.115"></a>
-<span class="sourceLineNo">116</span>   */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  HFileBlockEncodingContext newDataBlockEncodingContext(<a name="line.117"></a>
-<span class="sourceLineNo">118</span>      DataBlockEncoding encoding, byte[] headerBytes, HFileContext meta);<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  /**<a name="line.120"></a>
-<span class="sourceLineNo">121</span>   * Creates an encoder specific decoding context, which will prepare the data<a name="line.121"></a>
-<span class="sourceLineNo">122</span>   * before actual decoding<a name="line.122"></a>
-<span class="sourceLineNo">123</span>   *<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   * @param meta<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   *          HFile meta data        <a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * @return a newly created decoding context<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta);<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * An interface which enable to seek while underlying data is encoded.<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   *<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * It works on one HFileBlock, but it is reusable. See<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * {@link #setCurrentBuffer(ByteBuff)}.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   */<a name="line.135"></a>
-<span class="sourceLineNo">136</span>  interface EncodedSeeker {<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    /**<a name="line.137"></a>
-<span class="sourceLineNo">138</span>     * Set on which buffer there will be done seeking.<a name="line.138"></a>
-<span class="sourceLineNo">139</span>     * @param buffer Used for seeking.<a name="line.139"></a>
-<span class="sourceLineNo">140</span>     */<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    void setCurrentBuffer(ByteBuff buffer);<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>    /**<a name="line.143"></a>
-<span class="sourceLineNo">144</span>     * From the current position creates a cell using the key part<a name="line.144"></a>
-<span class="sourceLineNo">145</span>     * of the current buffer<a name="line.145"></a>
-<span class="sourceLineNo">146</span>     * @return key at current position<a name="line.146"></a>
-<span class="sourceLineNo">147</span>     */<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    Cell getKey();<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    /**<a name="line.150"></a>
-<span class="sourceLineNo">151</span>     * Does a shallow copy of the value at the current position. A shallow<a name="line.151"></a>
-<span class="sourceLineNo">152</span>     * copy is possible because the returned buffer refers to the backing array<a name="line.152"></a>
-<span class="sourceLineNo">153</span>     * of the original encoded buffer.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>     * @return value at current position<a name="line.154"></a>
-<span class="sourceLineNo">155</span>     */<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    ByteBuffer getValueShallowCopy();<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>    /**<a name="line.158"></a>
-<span class="sourceLineNo">159</span>     * @return the Cell at the current position. Includes memstore timestamp.<a name="line.159"></a>
-<span class="sourceLineNo">160</span>     */<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    Cell getCell();<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>    /** Set position to beginning of given block */<a name="line.163"></a>
-<span class="sourceLineNo">164</span>    void rewind();<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>    /**<a name="line.166"></a>
-<span class="sourceLineNo">167</span>     * Move to next position<a name="line.167"></a>
-<span class="sourceLineNo">168</span>     * @return true on success, false if there is no more positions.<a name="line.168"></a>
-<span class="sourceLineNo">169</span>     */<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    boolean next();<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>    /**<a name="line.172"></a>
-<span class="sourceLineNo">173</span>     * Moves the seeker position within the current block to:<a name="line.173"></a>
-<span class="sourceLineNo">174</span>     * &lt;ul&gt;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>     * &lt;li&gt;the last key that that is less than or equal to the given key if<a name="line.175"></a>
-<span class="sourceLineNo">176</span>     * &lt;code&gt;seekBefore&lt;/code&gt; is false&lt;/li&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>     * &lt;li&gt;the last key that is strictly less than the given key if &lt;code&gt;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>     * seekBefore&lt;/code&gt; is true. The caller is responsible for loading the<a name="line.178"></a>
-<span class="sourceLineNo">179</span>     * previous block if the requested key turns out to be the first key of the<a name="line.179"></a>
-<span class="sourceLineNo">180</span>     * current block.&lt;/li&gt;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>     * &lt;/ul&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span>     * @param key - Cell to which the seek should happen<a name="line.182"></a>
-<span class="sourceLineNo">183</span>     * @param seekBefore find the key strictly less than the given key in case<a name="line.183"></a>
-<span class="sourceLineNo">184</span>     *          of an exact match. Does not matter in case of an inexact match.<a name="line.184"></a>
-<span class="sourceLineNo">185</span>     * @return 0 on exact match, 1 on inexact match.<a name="line.185"></a>
-<span class="sourceLineNo">186</span>     */<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    int seekToKeyInBlock(Cell key, boolean seekBefore);<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>    /**<a name="line.189"></a>
-<span class="sourceLineNo">190</span>     * Compare the given key against the current key<a name="line.190"></a>
-<span class="sourceLineNo">191</span>     * @param comparator<a name="line.191"></a>
-<span class="sourceLineNo">192</span>     * @param key<a name="line.192"></a>
-<span class="sourceLineNo">193</span>     * @return -1 is the passed key is smaller than the current key, 0 if equal and 1 if greater<a name="line.193"></a>
-<span class="sourceLineNo">194</span>     */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    public int compareKey(CellComparator comparator, Cell key);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
-<span class="sourceLineNo">197</span>}<a name="line.197"></a>
+<span class="sourceLineNo">042</span>// TODO: This Interface should be deprecated and replaced. It presumes hfile and carnal knowledge of<a name="line.42"></a>
+<span class="sourceLineNo">043</span>// Cell internals. It was done for a different time. Remove. Purge.<a name="line.43"></a>
+<span class="sourceLineNo">044</span>  /**<a name="line.44"></a>
+<span class="sourceLineNo">045</span>   * Starts encoding for a block of KeyValues. Call<a name="line.45"></a>
+<span class="sourceLineNo">046</span>   * {@link #endBlockEncoding(HFileBlockEncodingContext, DataOutputStream, byte[])} to finish<a name="line.46"></a>
+<span class="sourceLineNo">047</span>   * encoding of a block.<a name="line.47"></a>
+<span class="sourceLineNo">048</span>   * @param encodingCtx<a name="line.48"></a>
+<span class="sourceLineNo">049</span>   * @param out<a name="line.49"></a>
+<span class="sourceLineNo">050</span>   * @throws IOException<a name="line.50"></a>
+<span class="sourceLineNo">051</span>   */<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  void startBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out)<a name="line.52"></a>
+<span class="sourceLineNo">053</span>      throws IOException;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>  /**<a name="line.55"></a>
+<span class="sourceLineNo">056</span>   * Encodes a KeyValue.<a name="line.56"></a>
+<span class="sourceLineNo">057</span>   * @param cell<a name="line.57"></a>
+<span class="sourceLineNo">058</span>   * @param encodingCtx<a name="line.58"></a>
+<span class="sourceLineNo">059</span>   * @param out<a name="line.59"></a>
+<span class="sourceLineNo">060</span>   * @return unencoded kv size written<a name="line.60"></a>
+<span class="sourceLineNo">061</span>   * @throws IOException<a name="line.61"></a>
+<span class="sourceLineNo">062</span>   */<a name="line.62"></a>
+<span class="sourceLineNo">063</span>  int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)<a name="line.63"></a>
+<span class="sourceLineNo">064</span>      throws IOException;<a name="line.64"></a>
+<span class="sourceLineNo">065</span><a name="line.65"></a>
+<span class="sourceLineNo">066</span>  /**<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   * Ends encoding for a block of KeyValues. Gives a chance for the encoder to do the finishing<a name="line.67"></a>
+<span class="sourceLineNo">068</span>   * stuff for the encoded block. It must be called at the end of block encoding.<a name="line.68"></a>
+<span class="sourceLineNo">069</span>   * @param encodingCtx<a name="line.69"></a>
+<span class="sourceLineNo">070</span>   * @param out<a name="line.70"></a>
+<span class="sourceLineNo">071</span>   * @param uncompressedBytesWithHeader<a name="line.71"></a>
+<span class="sourceLineNo">072</span>   * @throws IOException<a name="line.72"></a>
+<span class="sourceLineNo">073</span>   */<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,<a name="line.74"></a>
+<span class="sourceLineNo">075</span>      byte[] uncompressedBytesWithHeader) throws IOException;<a name="line.75"></a>
+<span class="sourceLineNo">076</span><a name="line.76"></a>
+<span class="sourceLineNo">077</span>  /**<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   * Decode.<a name="line.78"></a>
+<span class="sourceLineNo">079</span>   * @param source Compressed stream of KeyValues.<a name="line.79"></a>
+<span class="sourceLineNo">080</span>   * @param decodingCtx<a name="line.80"></a>
+<span class="sourceLineNo">081</span>   * @return Uncompressed block of KeyValues.<a name="line.81"></a>
+<span class="sourceLineNo">082</span>   * @throws IOException If there is an error in source.<a name="line.82"></a>
+<span class="sourceLineNo">083</span>   */<a name="line.83"></a>
+<span class="sourceLineNo">084</span>  ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext decodingCtx)<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      throws IOException;<a name="line.85"></a>
+<span class="sourceLineNo">086</span><a name="line.86"></a>
+<span class="sourceLineNo">087</span>  /**<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   * Return first key in block as a cell. Useful for indexing. Typically does not make<a name="line.88"></a>
+<span class="sourceLineNo">089</span>   * a deep copy but returns a buffer wrapping a segment of the actual block's<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   * byte array. This is because the first key in block is usually stored<a name="line.90"></a>
+<span class="sourceLineNo">091</span>   * unencoded.<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   * @param block encoded block we want index, the position will not change<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   * @return First key in block as a cell.<a name="line.93"></a>
+<span class="sourceLineNo">094</span>   */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  Cell getFirstKeyCellInBlock(ByteBuff block);<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>  /**<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * Create a HFileBlock seeker which find KeyValues within a block.<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   * @param comparator what kind of comparison should be used<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   * @param decodingCtx<a name="line.100"></a>
+<span class="sourceLineNo">101</span>   * @return A newly created seeker.<a name="line.101"></a>
+<span class="sourceLineNo">102</span>   */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  EncodedSeeker createSeeker(CellComparator comparator, <a name="line.103"></a>
+<span class="sourceLineNo">104</span>      HFileBlockDecodingContext decodingCtx);<a name="line.104"></a>
+<span class="sourceLineNo">105</span><a name="line.105"></a>
+<span class="sourceLineNo">106</span>  /**<a name="line.106"></a>
+<span class="sourceLineNo">107</span>   * Creates a encoder specific encoding context<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   *<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * @param encoding<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   *          encoding strategy used<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   * @param headerBytes<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   *          header bytes to be written, put a dummy header here if the header<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   *          is unknown<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * @param meta<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   *          HFile meta data<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   * @return a newly created encoding context<a name="line.116"></a>
+<span class="sourceLineNo">117</span>   */<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  HFileBlockEncodingContext newDataBlockEncodingContext(<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      DataBlockEncoding encoding, byte[] headerBytes, HFileContext meta);<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  /**<a name="line.121"></a>
+<span class="sourceLineNo">122</span>   * Creates an encoder specific decoding context, which will prepare the data<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * before actual decoding<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   *<a name="line.124"></a>
+<span class="sourceLineNo">125</span>   * @param meta<a name="line.125"></a>
+<span class="sourceLineNo">126</span>   *          HFile meta data        <a name="line.126"></a>
+<span class="sourceLineNo">127</span>   * @return a newly created decoding context<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   */<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta);<a name="line.129"></a>
+<span class="sourceLineNo">130</span><a name="line.130"></a>
+<span class="sourceLineNo">131</span>  /**<a name="line.131"></a>
+<span class="sourceLineNo">132</span>   * An interface which enable to seek while underlying data is encoded.<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   *<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * It works on one HFileBlock, but it is reusable. See<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * {@link #setCurrentBuffer(ByteBuff)}.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   */<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  interface EncodedSeeker {<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    /**<a name="line.138"></a>
+<span class="sourceLineNo">139</span>     * Set on which buffer there will be done seeking.<a name="line.139"></a>
+<span class="sourceLineNo">140</span>     * @param buffer Used for seeking.<a name="line.140"></a>
+<span class="sourceLineNo">141</span>     */<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    void setCurrentBuffer(ByteBuff buffer);<a name="line.142"></a>
+<span class="sourceLineNo">143</span><a name="line.143"></a>
+<span class="sourceLineNo">144</span>    /**<a name="line.144"></a>
+<span class="sourceLineNo">145</span>     * From the current position creates a cell using the key part<a name="line.145"></a>
+<span class="sourceLineNo">146</span>     * of the current buffer<a name="line.146"></a>
+<span class="sourceLineNo">147</span>     * @return key at current position<a name="line.147"></a>
+<span class="sourceLineNo">148</span>     */<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    Cell getKey();<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>    /**<a name="line.151"></a>
+<span class="sourceLineNo">152</span>     * Does a shallow copy of the value at the current position. A shallow<a name="line.152"></a>
+<span class="sourceLineNo">153</span>     * copy is possible because the returned buffer refers to the backing array<a name="line.153"></a>
+<span class="sourceLineNo">154</span>     * of the original encoded buffer.<a name="line.154"></a>
+<span class="sourceLineNo">155</span>     * @return value at current position<a name="line.155"></a>
+<span class="sourceLineNo">156</span>     */<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    ByteBuffer getValueShallowCopy();<a name="line.157"></a>
+<span class="sourceLineNo">158</span><a name="line.158"></a>
+<span class="sourceLineNo">159</span>    /**<a name="line.159"></a>
+<span class="sourceLineNo">160</span>     * @return the Cell at the current position. Includes memstore timestamp.<a name="line.160"></a>
+<span class="sourceLineNo">161</span>     */<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    Cell getCell();<a name="line.162"></a>
+<span class="sourceLineNo">163</span><a name="line.163"></a>
+<span class="sourceLineNo">164</span>    /** Set position to beginning of given block */<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    void rewind();<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    /**<a name="line.167"></a>
+<span class="sourceLineNo">168</span>     * Move to next position<a name="line.168"></a>
+<span class="sourceLineNo">169</span>     * @return true on success, false if there is no more positions.<a name="line.169"></a>
+<span class="sourceLineNo">170</span>     */<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    boolean next();<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>    /**<a name="line.173"></a>
+<span class="sourceLineNo">174</span>     * Moves the seeker position within the current block to:<a name="line.174"></a>
+<span class="sourceLineNo">175</span>     * &lt;ul&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>     * &lt;li&gt;the last key that that is less than or equal to the given key if<a name="line.176"></a>
+<span class="sourceLineNo">177</span>     * &lt;code&gt;seekBefore&lt;/code&gt; is false&lt;/li&gt;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>     * &lt;li&gt;the last key that is strictly less than the given key if &lt;code&gt;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>     * seekBefore&lt;/code&gt; is true. The caller is responsible for loading the<a name="line.179"></a>
+<span class="sourceLineNo">180</span>     * previous block if the requested key turns out to be the first key of the<a name="line.180"></a>
+<span class="sourceLineNo">181</span>     * current block.&lt;/li&gt;<a name="line.181"></a>
+<span class="sourceLineNo">182</span>     * &lt;/ul&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>     * @param key - Cell to which the seek should happen<a name="line.183"></a>
+<span class="sourceLineNo">184</span>     * @param seekBefore find the key strictly less than the given key in case<a name="line.184"></a>
+<span class="sourceLineNo">185</span>     *          of an exact match. Does not matter in case of an inexact match.<a name="line.185"></a>
+<span class="sourceLineNo">186</span>     * @return 0 on exact match, 1 on inexact match.<a name="line.186"></a>
+<span class="sourceLineNo">187</span>     */<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    int seekToKeyInBlock(Cell key, boolean seekBefore);<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span>    /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>     * Compare the given key against the current key<a name="line.191"></a>
+<span class="sourceLineNo">192</span>     * @param comparator<a name="line.192"></a>
+<span class="sourceLineNo">193</span>     * @param key<a name="line.193"></a>
+<span class="sourceLineNo">194</span>     * @return -1 is the passed key is smaller than the current key, 0 if equal and 1 if greater<a name="line.194"></a>
+<span class="sourceLineNo">195</span>     */<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    public int compareKey(CellComparator comparator, Cell key);<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span>}<a name="line.198"></a>